1/* Generated automatically by the program `genflags'
2 from the machine description file `md'. */
3
4#ifndef GCC_INSN_FLAGS_H
5#define GCC_INSN_FLAGS_H
6
/* Each HAVE_<pattern> macro expands to the C condition under which the
   like-named insn pattern from the machine description is available.
   A constant 1 means the pattern is always available.  */

/* x87 status-word / SAHF helpers and bit-insert patterns.  */
#define HAVE_x86_fnstsw_1 (TARGET_80387)
#define HAVE_x86_sahf_1 (TARGET_SAHF)
#define HAVE_insvhi_1 1
#define HAVE_insvsi_1 1
#define HAVE_insvdi_1 (TARGET_64BIT)
#define HAVE_swapxf (TARGET_80387)
/* Zero/sign extension; the *_and variants use AND-based zero extension
   only when tuning asks for it and we are optimizing for speed.  */
#define HAVE_zero_extendqidi2 (TARGET_64BIT)
#define HAVE_zero_extendhidi2 (TARGET_64BIT)
#define HAVE_zero_extendqisi2_and (TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun))
#define HAVE_zero_extendhisi2_and (TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun))
#define HAVE_zero_extendqihi2_and (TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun))
#define HAVE_extendsidi2_1 (!TARGET_64BIT)
#define HAVE_extendqidi2 (TARGET_64BIT)
#define HAVE_extendhidi2 (TARGET_64BIT)
#define HAVE_extendhisi2 1
#define HAVE_extendqisi2 1
#define HAVE_extendqihi2 1
#define HAVE_truncxfsf2_i387_noop (TARGET_80387 && flag_unsafe_math_optimizations)
#define HAVE_truncxfdf2_i387_noop (TARGET_80387 && flag_unsafe_math_optimizations)
/* FP -> integer truncation through SSE cvtt* patterns.  */
#define HAVE_fix_truncsfsi_sse (SSE_FLOAT_MODE_P (SFmode) \
 && (!TARGET_FISTTP || TARGET_SSE_MATH))
#define HAVE_fix_truncsfdi_sse ((SSE_FLOAT_MODE_P (SFmode) \
 && (!TARGET_FISTTP || TARGET_SSE_MATH)) && (TARGET_64BIT))
#define HAVE_fix_truncdfsi_sse (SSE_FLOAT_MODE_P (DFmode) \
 && (!TARGET_FISTTP || TARGET_SSE_MATH))
#define HAVE_fix_truncdfdi_sse ((SSE_FLOAT_MODE_P (DFmode) \
 && (!TARGET_FISTTP || TARGET_SSE_MATH)) && (TARGET_64BIT))
/* x87 FISTTP-based truncations; the (TARGET_64BIT || <mode> != DImode)
   term comes from mode-iterator expansion.  */
#define HAVE_fix_trunchi_fisttp_i387_1 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_FISTTP \
 && !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && (TARGET_64BIT || HImode != DImode)) \
 && TARGET_SSE_MATH) \
 && can_create_pseudo_p ())
#define HAVE_fix_truncsi_fisttp_i387_1 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_FISTTP \
 && !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && (TARGET_64BIT || SImode != DImode)) \
 && TARGET_SSE_MATH) \
 && can_create_pseudo_p ())
/* DImode FISTTP truncation.  The iterator residue "DImode != DImode"
   is always false, so the inner disjunction reduces to TARGET_64BIT.  */
#define HAVE_fix_truncdi_fisttp_i387_1 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_FISTTP \
 && !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_64BIT) \
 && TARGET_SSE_MATH) \
 && can_create_pseudo_p ())
/* FISTTP truncations without the can_create_pseudo_p requirement.  */
#define HAVE_fix_trunchi_i387_fisttp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_FISTTP \
 && !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && (TARGET_64BIT || HImode != DImode)) \
 && TARGET_SSE_MATH))
#define HAVE_fix_truncsi_i387_fisttp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_FISTTP \
 && !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && (TARGET_64BIT || SImode != DImode)) \
 && TARGET_SSE_MATH))
/* DImode FISTTP truncation.  "DImode != DImode" is always false, so
   the generated (TARGET_64BIT || DImode != DImode) is just TARGET_64BIT.  */
#define HAVE_fix_truncdi_i387_fisttp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_FISTTP \
 && !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_64BIT) \
 && TARGET_SSE_MATH))
/* _with_temp variants: same availability conditions as the plain
   FISTTP patterns above.  */
#define HAVE_fix_trunchi_i387_fisttp_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_FISTTP \
 && !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && (TARGET_64BIT || HImode != DImode)) \
 && TARGET_SSE_MATH))
#define HAVE_fix_truncsi_i387_fisttp_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_FISTTP \
 && !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && (TARGET_64BIT || SImode != DImode)) \
 && TARGET_SSE_MATH))
/* DImode FISTTP truncation with temp.  The always-false
   "DImode != DImode" term is dropped; the inner test is TARGET_64BIT.  */
#define HAVE_fix_truncdi_i387_fisttp_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_FISTTP \
 && !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && TARGET_64BIT) \
 && TARGET_SSE_MATH))
/* Classic x87 FIST truncations (no FISTTP).  */
#define HAVE_fix_truncdi_i387 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && !TARGET_FISTTP \
 && !(TARGET_64BIT && SSE_FLOAT_MODE_P (GET_MODE (operands[1]))))
#define HAVE_fix_truncdi_i387_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && !TARGET_FISTTP \
 && !(TARGET_64BIT && SSE_FLOAT_MODE_P (GET_MODE (operands[1]))))
#define HAVE_fix_trunchi_i387 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && !TARGET_FISTTP \
 && !SSE_FLOAT_MODE_P (GET_MODE (operands[1])))
#define HAVE_fix_truncsi_i387 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && !TARGET_FISTTP \
 && !SSE_FLOAT_MODE_P (GET_MODE (operands[1])))
#define HAVE_fix_trunchi_i387_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && !TARGET_FISTTP \
 && !SSE_FLOAT_MODE_P (GET_MODE (operands[1])))
#define HAVE_fix_truncsi_i387_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
 && !TARGET_FISTTP \
 && !SSE_FLOAT_MODE_P (GET_MODE (operands[1])))
/* x87 control-word store/load and integer -> FP conversions.  */
#define HAVE_x86_fnstcw_1 (TARGET_80387)
#define HAVE_x86_fldcw_1 (TARGET_80387)
#define HAVE_floathisf2 (TARGET_80387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387))
#define HAVE_floathidf2 (TARGET_80387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387))
#define HAVE_floathixf2 (TARGET_80387 \
 && (!(SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387))
#define HAVE_floatsixf2 (TARGET_80387)
#define HAVE_floatdixf2 (TARGET_80387)
#define HAVE_floatdisf2_i387_with_xmm (TARGET_80387 && X87_ENABLE_FLOAT (SFmode, DImode) \
 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC \
 && !TARGET_64BIT && optimize_function_for_speed_p (cfun))
#define HAVE_floatdidf2_i387_with_xmm (TARGET_80387 && X87_ENABLE_FLOAT (DFmode, DImode) \
 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC \
 && !TARGET_64BIT && optimize_function_for_speed_p (cfun))
#define HAVE_floatdixf2_i387_with_xmm (TARGET_80387 && X87_ENABLE_FLOAT (XFmode, DImode) \
 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC \
 && !TARGET_64BIT && optimize_function_for_speed_p (cfun))
/* Add/subtract (with carry/borrow) and division patterns.  */
#define HAVE_addsi_1_zext (TARGET_64BIT && ix86_binary_operator_ok (PLUS, SImode, operands))
#define HAVE_addqi_ext_1 (/* FIXME: without this LRA can't reload this pattern, see PR82524. */ \
 rtx_equal_p (operands[0], operands[1]))
#define HAVE_addqi3_carry (ix86_binary_operator_ok (PLUS, QImode, operands))
#define HAVE_addhi3_carry (ix86_binary_operator_ok (PLUS, HImode, operands))
#define HAVE_addsi3_carry (ix86_binary_operator_ok (PLUS, SImode, operands))
#define HAVE_adddi3_carry ((ix86_binary_operator_ok (PLUS, DImode, operands)) && (TARGET_64BIT))
#define HAVE_addcarrysi (ix86_binary_operator_ok (PLUS, SImode, operands))
#define HAVE_addcarrydi ((ix86_binary_operator_ok (PLUS, DImode, operands)) && (TARGET_64BIT))
#define HAVE_subqi3_carry (ix86_binary_operator_ok (MINUS, QImode, operands))
#define HAVE_subhi3_carry (ix86_binary_operator_ok (MINUS, HImode, operands))
#define HAVE_subsi3_carry (ix86_binary_operator_ok (MINUS, SImode, operands))
#define HAVE_subdi3_carry ((ix86_binary_operator_ok (MINUS, DImode, operands)) && (TARGET_64BIT))
#define HAVE_subborrowsi (ix86_binary_operator_ok (MINUS, SImode, operands))
#define HAVE_subborrowdi ((ix86_binary_operator_ok (MINUS, DImode, operands)) && (TARGET_64BIT))
#define HAVE_divmodsi4_1 1
#define HAVE_divmoddi4_1 (TARGET_64BIT)
#define HAVE_divmodhiqi3 (TARGET_QIMODE_MATH)
#define HAVE_udivmodsi4_1 1
#define HAVE_udivmoddi4_1 (TARGET_64BIT)
#define HAVE_udivmodhiqi3 (TARGET_QIMODE_MATH)
#define HAVE_andqi_ext_1 (/* FIXME: without this LRA can't reload this pattern, see PR82524. */ \
 rtx_equal_p (operands[0], operands[1]))
/* SSE copysign patterns; the (<mode> == TFmode) terms are iterator
   residue and are compile-time constant.  */
#define HAVE_copysignsf3_const ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || (TARGET_SSE && (SFmode == TFmode)))
#define HAVE_copysigndf3_const ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || (TARGET_SSE && (DFmode == TFmode)))
#define HAVE_copysigntf3_const ((SSE_FLOAT_MODE_P (TFmode) && TARGET_SSE_MATH) \
 || (TARGET_SSE && (TFmode == TFmode)))
#define HAVE_copysignsf3_var ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || (TARGET_SSE && (SFmode == TFmode)))
#define HAVE_copysigndf3_var ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || (TARGET_SSE && (DFmode == TFmode)))
#define HAVE_copysigntf3_var ((SSE_FLOAT_MODE_P (TFmode) && TARGET_SSE_MATH) \
 || (TARGET_SSE && (TFmode == TFmode)))
/* Double-word shifts, arithmetic-shift sign fills and rotates.  */
#define HAVE_x86_64_shld (TARGET_64BIT)
#define HAVE_x86_shld 1
#define HAVE_x86_64_shrd (TARGET_64BIT)
#define HAVE_x86_shrd 1
#define HAVE_ashrdi3_cvt (TARGET_64BIT && INTVAL (operands[2]) == 63 \
 && (TARGET_USE_CLTD || optimize_function_for_size_p (cfun)) \
 && ix86_binary_operator_ok (ASHIFTRT, DImode, operands))
#define HAVE_ashrsi3_cvt (INTVAL (operands[2]) == 31 \
 && (TARGET_USE_CLTD || optimize_function_for_size_p (cfun)) \
 && ix86_binary_operator_ok (ASHIFTRT, SImode, operands))
#define HAVE_ix86_rotldi3_doubleword (!TARGET_64BIT)
#define HAVE_ix86_rotlti3_doubleword (TARGET_64BIT)
#define HAVE_ix86_rotrdi3_doubleword (!TARGET_64BIT)
#define HAVE_ix86_rotrti3_doubleword (TARGET_64BIT)
/* setcc, control flow, prologue/epilogue and bit-scan patterns.  */
#define HAVE_setcc_sf_sse (SSE_FLOAT_MODE_P (SFmode))
#define HAVE_setcc_df_sse (SSE_FLOAT_MODE_P (DFmode))
#define HAVE_jump 1
#define HAVE_blockage 1
#define HAVE_prologue_use 1
#define HAVE_simple_return_internal (reload_completed)
#define HAVE_interrupt_return (reload_completed)
#define HAVE_simple_return_internal_long (reload_completed)
#define HAVE_simple_return_pop_internal (reload_completed)
#define HAVE_simple_return_indirect_internal (reload_completed)
#define HAVE_nop 1
#define HAVE_nops (reload_completed)
#define HAVE_pad 1
#define HAVE_set_got_rex64 (TARGET_64BIT)
#define HAVE_set_rip_rex64 (TARGET_64BIT)
#define HAVE_set_got_offset_rex64 (TARGET_LP64)
#define HAVE_eh_return_internal 1
#define HAVE_leave (!TARGET_64BIT)
#define HAVE_leave_rex64 (TARGET_64BIT)
#define HAVE_split_stack_return 1
#define HAVE_ffssi2_no_cmove (!TARGET_CMOVE)
#define HAVE_ctzsi2 1
#define HAVE_ctzdi2 (TARGET_64BIT)
#define HAVE_bsr_rex64 (TARGET_64BIT)
#define HAVE_bsr 1
#define HAVE_clzsi2_lzcnt (TARGET_LZCNT)
#define HAVE_clzdi2_lzcnt ((TARGET_LZCNT) && (TARGET_64BIT))
#define HAVE_tzcnt_si (TARGET_BMI)
#define HAVE_lzcnt_si (TARGET_LZCNT)
/* DImode TZCNT/LZCNT.  The generator emitted each condition twice;
   '&&' is idempotent, so one copy of each test is equivalent.  */
#define HAVE_tzcnt_di ((TARGET_64BIT) && (TARGET_BMI))
#define HAVE_lzcnt_di ((TARGET_64BIT) && (TARGET_LZCNT))
/* BMI/BMI2/TBM/POPCNT bit-manipulation builtins.  */
#define HAVE_tzcnt_hi (TARGET_BMI)
#define HAVE_lzcnt_hi (TARGET_LZCNT)
#define HAVE_bmi_bextr_si (TARGET_BMI)
#define HAVE_bmi_bextr_di ((TARGET_BMI) && (TARGET_64BIT))
#define HAVE_bmi2_pdep_si3 (TARGET_BMI2)
#define HAVE_bmi2_pdep_di3 ((TARGET_BMI2) && (TARGET_64BIT))
#define HAVE_bmi2_pext_si3 (TARGET_BMI2)
#define HAVE_bmi2_pext_di3 ((TARGET_BMI2) && (TARGET_64BIT))
#define HAVE_tbm_bextri_si (TARGET_TBM)
#define HAVE_tbm_bextri_di ((TARGET_TBM) && (TARGET_64BIT))
#define HAVE_popcountsi2 (TARGET_POPCNT)
#define HAVE_popcountdi2 ((TARGET_POPCNT) && (TARGET_64BIT))
#define HAVE_popcounthi2 (TARGET_POPCNT)
#define HAVE_bswaphi_lowpart 1
#define HAVE_paritydi2_cmp (! TARGET_POPCNT)
#define HAVE_paritysi2_cmp (! TARGET_POPCNT)
/* x87 "fancy math" instructions: fsqrt, fprem, fsincos, fptan, fpatan,
   fyl2x(p1), fxtract, fscale; most require unsafe-math.  */
#define HAVE_truncxfsf2_i387_noop_unspec (TARGET_USE_FANCY_MATH_387)
#define HAVE_truncxfdf2_i387_noop_unspec (TARGET_USE_FANCY_MATH_387)
#define HAVE_sqrtxf2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_sqrt_extendsfxf2_i387 (TARGET_USE_FANCY_MATH_387)
#define HAVE_sqrt_extenddfxf2_i387 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fpremxf4_i387 (TARGET_USE_FANCY_MATH_387 \
 && flag_finite_math_only)
#define HAVE_fprem1xf4_i387 (TARGET_USE_FANCY_MATH_387 \
 && flag_finite_math_only)
#define HAVE_sincosxf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_sincos_extendsfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_sincos_extenddfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_fptanxf4_i387 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations \
 && standard_80387_constant_p (operands[3]) == 2)
#define HAVE_fptan_extendsfxf4_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations \
 && standard_80387_constant_p (operands[3]) == 2)
#define HAVE_fptan_extenddfxf4_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations \
 && standard_80387_constant_p (operands[3]) == 2)
#define HAVE_fpatan_extendsfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_fpatan_extenddfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_fyl2xxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fyl2x_extendsfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_fyl2x_extenddfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_fyl2xp1xf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fyl2xp1_extendsfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_fyl2xp1_extenddfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_fxtractxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fxtract_extendsfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_fxtract_extenddfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_fscalexf4_i387 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
/* Rounding: SSE4.1 round*, x87 frndint and FIST with rounding-mode
   control; the floor/ceil/trunc forms obey -ffp-int-builtin-inexact.  */
#define HAVE_sse4_1_roundsf2 (TARGET_ROUND)
#define HAVE_sse4_1_rounddf2 (TARGET_ROUND)
#define HAVE_rintxf2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_rintsf2_frndint (TARGET_USE_FANCY_MATH_387)
#define HAVE_rintdf2_frndint (TARGET_USE_FANCY_MATH_387)
#define HAVE_fistdi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fistdi2_with_temp (TARGET_USE_FANCY_MATH_387)
#define HAVE_fisthi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fistsi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fisthi2_with_temp (TARGET_USE_FANCY_MATH_387)
#define HAVE_fistsi2_with_temp (TARGET_USE_FANCY_MATH_387)
#define HAVE_frndintsf2_floor (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math) \
 && can_create_pseudo_p ())
#define HAVE_frndintsf2_ceil (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math) \
 && can_create_pseudo_p ())
#define HAVE_frndintsf2_trunc (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math) \
 && can_create_pseudo_p ())
#define HAVE_frndintdf2_floor (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math) \
 && can_create_pseudo_p ())
#define HAVE_frndintdf2_ceil (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math) \
 && can_create_pseudo_p ())
#define HAVE_frndintdf2_trunc (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math) \
 && can_create_pseudo_p ())
#define HAVE_frndintxf2_floor (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math) \
 && can_create_pseudo_p ())
#define HAVE_frndintxf2_ceil (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math) \
 && can_create_pseudo_p ())
#define HAVE_frndintxf2_trunc (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math) \
 && can_create_pseudo_p ())
#define HAVE_frndintsf2_floor_i387 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_frndintsf2_ceil_i387 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_frndintsf2_trunc_i387 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_frndintdf2_floor_i387 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_frndintdf2_ceil_i387 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_frndintdf2_trunc_i387 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_frndintxf2_floor_i387 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_frndintxf2_ceil_i387 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_frndintxf2_trunc_i387 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_frndintxf2_mask_pm (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations \
 && can_create_pseudo_p ())
#define HAVE_frndintxf2_mask_pm_i387 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fistdi2_floor (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fistdi2_ceil (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fistdi2_floor_with_temp (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fistdi2_ceil_with_temp (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fisthi2_floor (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fisthi2_ceil (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fistsi2_floor (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fistsi2_ceil (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fisthi2_floor_with_temp (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fisthi2_ceil_with_temp (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fistsi2_floor_with_temp (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_fistsi2_ceil_with_temp (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
/* FXAM classification, SSE min/max and stack adjustment/probing.  */
#define HAVE_fxamsf2_i387 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fxamdf2_i387 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fxamxf2_i387 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fxamsf2_i387_with_temp (TARGET_USE_FANCY_MATH_387 \
 && can_create_pseudo_p ())
#define HAVE_fxamdf2_i387_with_temp (TARGET_USE_FANCY_MATH_387 \
 && can_create_pseudo_p ())
#define HAVE_movmsk_df (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_cld 1
#define HAVE_smaxsf3 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_sminsf3 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_smaxdf3 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_smindf3 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_pro_epilogue_adjust_stack_si_add (Pmode == SImode)
#define HAVE_pro_epilogue_adjust_stack_di_add (Pmode == DImode)
#define HAVE_pro_epilogue_adjust_stack_si_sub (Pmode == SImode)
#define HAVE_pro_epilogue_adjust_stack_di_sub (Pmode == DImode)
#define HAVE_allocate_stack_worker_probe_si ((ix86_target_stack_probe ()) && (Pmode == SImode))
#define HAVE_allocate_stack_worker_probe_di ((ix86_target_stack_probe ()) && (Pmode == DImode))
#define HAVE_probe_stack_si (word_mode == SImode)
#define HAVE_probe_stack_di (word_mode == DImode)
#define HAVE_adjust_stack_and_probesi (Pmode == SImode)
#define HAVE_adjust_stack_and_probedi (Pmode == DImode)
#define HAVE_probe_stack_rangesi (Pmode == SImode)
#define HAVE_probe_stack_rangedi (Pmode == DImode)
#define HAVE_trap 1
/* Stack-protector, CRC32, TSC/PMC reads and extended state save/restore.  */
#define HAVE_stack_protect_set_si ((TARGET_SSP_TLS_GUARD) && (ptr_mode == SImode))
#define HAVE_stack_protect_set_di ((TARGET_SSP_TLS_GUARD) && (ptr_mode == DImode))
#define HAVE_stack_tls_protect_set_si (ptr_mode == SImode)
#define HAVE_stack_tls_protect_set_di (ptr_mode == DImode)
#define HAVE_stack_protect_test_si ((TARGET_SSP_TLS_GUARD) && (ptr_mode == SImode))
#define HAVE_stack_protect_test_di ((TARGET_SSP_TLS_GUARD) && (ptr_mode == DImode))
#define HAVE_stack_tls_protect_test_si (ptr_mode == SImode)
#define HAVE_stack_tls_protect_test_di (ptr_mode == DImode)
#define HAVE_sse4_2_crc32qi (TARGET_SSE4_2 || TARGET_CRC32)
#define HAVE_sse4_2_crc32hi (TARGET_SSE4_2 || TARGET_CRC32)
#define HAVE_sse4_2_crc32si (TARGET_SSE4_2 || TARGET_CRC32)
#define HAVE_sse4_2_crc32di (TARGET_64BIT && (TARGET_SSE4_2 || TARGET_CRC32))
#define HAVE_rdpmc (!TARGET_64BIT)
#define HAVE_rdpmc_rex64 (TARGET_64BIT)
#define HAVE_rdtsc (!TARGET_64BIT)
#define HAVE_rdtsc_rex64 (TARGET_64BIT)
#define HAVE_rdtscp (!TARGET_64BIT)
#define HAVE_rdtscp_rex64 (TARGET_64BIT)
#define HAVE_fxsave (TARGET_FXSR)
#define HAVE_fxsave64 (TARGET_64BIT && TARGET_FXSR)
#define HAVE_fxrstor (TARGET_FXSR)
#define HAVE_fxrstor64 (TARGET_64BIT && TARGET_FXSR)
#define HAVE_xsave (!TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xsaveopt ((!TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEOPT))
#define HAVE_xsavec ((!TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEC))
#define HAVE_xsaves ((!TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_xsave_rex64 (TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xsaveopt_rex64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEOPT))
#define HAVE_xsavec_rex64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEC))
#define HAVE_xsaves_rex64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_xsave64 (TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xsaveopt64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEOPT))
#define HAVE_xsavec64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEC))
#define HAVE_xsaves64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_xrstor (!TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xrstors ((!TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_xrstor_rex64 (TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xrstors_rex64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_xrstor64 (TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xrstors64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_fnstenv (TARGET_80387)
#define HAVE_fldenv (TARGET_80387)
#define HAVE_fnstsw (TARGET_80387)
#define HAVE_fnclex (TARGET_80387)
#define HAVE_lwp_slwpcbsi ((TARGET_LWP) && (Pmode == SImode))
#define HAVE_lwp_slwpcbdi ((TARGET_LWP) && (Pmode == DImode))
#define HAVE_rdfsbasesi (TARGET_64BIT && TARGET_FSGSBASE)
#define HAVE_rdgsbasesi (TARGET_64BIT && TARGET_FSGSBASE)
/* DImode FSGSBASE reads.  The generator emitted TARGET_64BIT twice;
   one test is equivalent and matches the SImode variants' style.  */
#define HAVE_rdfsbasedi (TARGET_64BIT && TARGET_FSGSBASE)
#define HAVE_rdgsbasedi (TARGET_64BIT && TARGET_FSGSBASE)
/* SImode FSGSBASE writes (wrfsbase/wrgsbase are 64-bit-only insns).  */
#define HAVE_wrfsbasesi (TARGET_64BIT && TARGET_FSGSBASE)
#define HAVE_wrgsbasesi (TARGET_64BIT && TARGET_FSGSBASE)
/* DImode FSGSBASE writes.  As with the read patterns, the duplicated
   TARGET_64BIT test is redundant; one copy is equivalent.  */
#define HAVE_wrfsbasedi (TARGET_64BIT && TARGET_FSGSBASE)
#define HAVE_wrgsbasedi (TARGET_64BIT && TARGET_FSGSBASE)
455#define HAVE_rdrandhi_1 (TARGET_RDRND)
456#define HAVE_rdrandsi_1 (TARGET_RDRND)
457#define HAVE_rdranddi_1 ((TARGET_RDRND) && (TARGET_64BIT))
458#define HAVE_rdseedhi_1 (TARGET_RDSEED)
459#define HAVE_rdseedsi_1 (TARGET_RDSEED)
460#define HAVE_rdseeddi_1 ((TARGET_RDSEED) && (TARGET_64BIT))
461#define HAVE_xbegin_1 (TARGET_RTM)
462#define HAVE_xend (TARGET_RTM)
463#define HAVE_xabort (TARGET_RTM)
464#define HAVE_xtest_1 (TARGET_RTM)
465#define HAVE_clwb (TARGET_CLWB)
466#define HAVE_clflushopt (TARGET_CLFLUSHOPT)
467#define HAVE_mwaitx (TARGET_MWAITX)
468#define HAVE_monitorx_si ((TARGET_MWAITX) && (Pmode == SImode))
469#define HAVE_monitorx_di ((TARGET_MWAITX) && (Pmode == DImode))
470#define HAVE_clzero_si ((TARGET_CLZERO) && (Pmode == SImode))
471#define HAVE_clzero_di ((TARGET_CLZERO) && (Pmode == DImode))
472#define HAVE_move_size_reloc_si (TARGET_MPX)
473#define HAVE_move_size_reloc_di ((TARGET_MPX) && (TARGET_64BIT))
474#define HAVE_rdpid (TARGET_RDPID)
475#define HAVE_sse_movntq (TARGET_SSE || TARGET_3DNOW_A)
476#define HAVE_mmx_ieee_maxv2sf3 (TARGET_3DNOW)
477#define HAVE_mmx_ieee_minv2sf3 (TARGET_3DNOW)
478#define HAVE_mmx_rcpv2sf2 (TARGET_3DNOW)
479#define HAVE_mmx_rcpit1v2sf3 (TARGET_3DNOW)
480#define HAVE_mmx_rcpit2v2sf3 (TARGET_3DNOW)
481#define HAVE_mmx_rsqrtv2sf2 (TARGET_3DNOW)
482#define HAVE_mmx_rsqit1v2sf3 (TARGET_3DNOW)
483#define HAVE_mmx_haddv2sf3 (TARGET_3DNOW)
484#define HAVE_mmx_hsubv2sf3 (TARGET_3DNOW_A)
485#define HAVE_mmx_addsubv2sf3 (TARGET_3DNOW_A)
486#define HAVE_mmx_gtv2sf3 (TARGET_3DNOW)
487#define HAVE_mmx_gev2sf3 (TARGET_3DNOW)
488#define HAVE_mmx_pf2id (TARGET_3DNOW)
489#define HAVE_mmx_pf2iw (TARGET_3DNOW_A)
490#define HAVE_mmx_pi2fw (TARGET_3DNOW_A)
491#define HAVE_mmx_floatv2si2 (TARGET_3DNOW)
492#define HAVE_mmx_pswapdv2sf2 (TARGET_3DNOW_A)
493#define HAVE_mmx_ashrv4hi3 (TARGET_MMX)
494#define HAVE_mmx_ashrv2si3 (TARGET_MMX)
495#define HAVE_mmx_ashlv4hi3 (TARGET_MMX)
496#define HAVE_mmx_lshrv4hi3 (TARGET_MMX)
497#define HAVE_mmx_ashlv2si3 (TARGET_MMX)
498#define HAVE_mmx_lshrv2si3 (TARGET_MMX)
499#define HAVE_mmx_ashlv1di3 (TARGET_MMX)
500#define HAVE_mmx_lshrv1di3 (TARGET_MMX)
501#define HAVE_mmx_gtv8qi3 (TARGET_MMX)
502#define HAVE_mmx_gtv4hi3 (TARGET_MMX)
503#define HAVE_mmx_gtv2si3 (TARGET_MMX)
504#define HAVE_mmx_andnotv8qi3 (TARGET_MMX)
505#define HAVE_mmx_andnotv4hi3 (TARGET_MMX)
506#define HAVE_mmx_andnotv2si3 (TARGET_MMX)
507#define HAVE_mmx_packsswb (TARGET_MMX)
508#define HAVE_mmx_packssdw (TARGET_MMX)
509#define HAVE_mmx_packuswb (TARGET_MMX)
510#define HAVE_mmx_punpckhbw (TARGET_MMX)
511#define HAVE_mmx_punpcklbw (TARGET_MMX)
512#define HAVE_mmx_punpckhwd (TARGET_MMX)
513#define HAVE_mmx_punpcklwd (TARGET_MMX)
514#define HAVE_mmx_punpckhdq (TARGET_MMX)
515#define HAVE_mmx_punpckldq (TARGET_MMX)
516#define HAVE_mmx_pextrw (TARGET_SSE || TARGET_3DNOW_A)
517#define HAVE_mmx_pshufw_1 (TARGET_SSE || TARGET_3DNOW_A)
518#define HAVE_mmx_pswapdv2si2 (TARGET_3DNOW_A)
519#define HAVE_mmx_psadbw (TARGET_SSE || TARGET_3DNOW_A)
520#define HAVE_mmx_pmovmskb (TARGET_SSE || TARGET_3DNOW_A)
521#define HAVE_movv64qi_internal ((TARGET_SSE \
522 && (register_operand (operands[0], V64QImode) \
523 || register_operand (operands[1], V64QImode))) && (TARGET_AVX512F))
524#define HAVE_movv32qi_internal ((TARGET_SSE \
525 && (register_operand (operands[0], V32QImode) \
526 || register_operand (operands[1], V32QImode))) && (TARGET_AVX))
527#define HAVE_movv16qi_internal (TARGET_SSE \
528 && (register_operand (operands[0], V16QImode) \
529 || register_operand (operands[1], V16QImode)))
530#define HAVE_movv32hi_internal ((TARGET_SSE \
531 && (register_operand (operands[0], V32HImode) \
532 || register_operand (operands[1], V32HImode))) && (TARGET_AVX512F))
533#define HAVE_movv16hi_internal ((TARGET_SSE \
534 && (register_operand (operands[0], V16HImode) \
535 || register_operand (operands[1], V16HImode))) && (TARGET_AVX))
536#define HAVE_movv8hi_internal (TARGET_SSE \
537 && (register_operand (operands[0], V8HImode) \
538 || register_operand (operands[1], V8HImode)))
539#define HAVE_movv16si_internal ((TARGET_SSE \
540 && (register_operand (operands[0], V16SImode) \
541 || register_operand (operands[1], V16SImode))) && (TARGET_AVX512F))
542#define HAVE_movv8si_internal ((TARGET_SSE \
543 && (register_operand (operands[0], V8SImode) \
544 || register_operand (operands[1], V8SImode))) && (TARGET_AVX))
545#define HAVE_movv4si_internal (TARGET_SSE \
546 && (register_operand (operands[0], V4SImode) \
547 || register_operand (operands[1], V4SImode)))
548#define HAVE_movv8di_internal ((TARGET_SSE \
549 && (register_operand (operands[0], V8DImode) \
550 || register_operand (operands[1], V8DImode))) && (TARGET_AVX512F))
551#define HAVE_movv4di_internal ((TARGET_SSE \
552 && (register_operand (operands[0], V4DImode) \
553 || register_operand (operands[1], V4DImode))) && (TARGET_AVX))
554#define HAVE_movv2di_internal (TARGET_SSE \
555 && (register_operand (operands[0], V2DImode) \
556 || register_operand (operands[1], V2DImode)))
557#define HAVE_movv4ti_internal ((TARGET_SSE \
558 && (register_operand (operands[0], V4TImode) \
559 || register_operand (operands[1], V4TImode))) && (TARGET_AVX512BW))
560#define HAVE_movv2ti_internal ((TARGET_SSE \
561 && (register_operand (operands[0], V2TImode) \
562 || register_operand (operands[1], V2TImode))) && (TARGET_AVX))
563#define HAVE_movv1ti_internal (TARGET_SSE \
564 && (register_operand (operands[0], V1TImode) \
565 || register_operand (operands[1], V1TImode)))
566#define HAVE_movv16sf_internal ((TARGET_SSE \
567 && (register_operand (operands[0], V16SFmode) \
568 || register_operand (operands[1], V16SFmode))) && (TARGET_AVX512F))
569#define HAVE_movv8sf_internal ((TARGET_SSE \
570 && (register_operand (operands[0], V8SFmode) \
571 || register_operand (operands[1], V8SFmode))) && (TARGET_AVX))
572#define HAVE_movv4sf_internal (TARGET_SSE \
573 && (register_operand (operands[0], V4SFmode) \
574 || register_operand (operands[1], V4SFmode)))
575#define HAVE_movv8df_internal ((TARGET_SSE \
576 && (register_operand (operands[0], V8DFmode) \
577 || register_operand (operands[1], V8DFmode))) && (TARGET_AVX512F))
578#define HAVE_movv4df_internal ((TARGET_SSE \
579 && (register_operand (operands[0], V4DFmode) \
580 || register_operand (operands[1], V4DFmode))) && (TARGET_AVX))
581#define HAVE_movv2df_internal (TARGET_SSE \
582 && (register_operand (operands[0], V2DFmode) \
583 || register_operand (operands[1], V2DFmode)))
/* Masked vector loads: 512-bit forms need AVX512F (or AVX512BW for the
   byte/word element widths); the 128/256-bit forms additionally require
   AVX512VL.  (Generated by genflags -- do not hand-edit.)  */
#define HAVE_avx512f_loadv16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_loadv8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_loadv8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_loadv4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_loadv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512vl_loadv8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_loadv8df_mask (TARGET_AVX512F)
#define HAVE_avx512vl_loadv4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_loadv64qi_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_loadv16qi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv32qi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_loadv32hi_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_loadv16hi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv8hi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
/* Mask-blend (blendm) patterns: same feature-gating scheme as the masked
   loads above.  (Generated by genflags -- do not hand-edit.)  */
#define HAVE_avx512f_blendmv16si (TARGET_AVX512F)
#define HAVE_avx512vl_blendmv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_blendmv8di (TARGET_AVX512F)
#define HAVE_avx512vl_blendmv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_blendmv16sf (TARGET_AVX512F)
#define HAVE_avx512vl_blendmv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_blendmv8df (TARGET_AVX512F)
#define HAVE_avx512vl_blendmv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_blendmv64qi (TARGET_AVX512BW)
#define HAVE_avx512vl_blendmv16qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv32qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_blendmv32hi (TARGET_AVX512BW)
#define HAVE_avx512vl_blendmv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
/* Masked vector stores: mirrors the masked-load gating above.
   (Generated by genflags -- do not hand-edit.)  */
#define HAVE_avx512f_storev16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storev8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_storev8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storev4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_storev16sf_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storev8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_storev8df_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storev4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_storev64qi_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_storev16qi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev32qi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_storev32hi_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_storev16hi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev8hi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
/* Misc moves: movq128, DI->SSE transfer, unaligned lddqu loads, and the
   non-temporal (movnt*) store patterns.  (Generated by genflags -- do not
   hand-edit.)  */
#define HAVE_sse2_movq128 (TARGET_SSE2)
#define HAVE_movdi_to_sse (!TARGET_64BIT && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC)
#define HAVE_avx_lddqu256 ((TARGET_SSE3) && (TARGET_AVX))
#define HAVE_sse3_lddqu (TARGET_SSE3)
#define HAVE_sse2_movntisi (TARGET_SSE2)
#define HAVE_sse2_movntidi ((TARGET_SSE2) && (TARGET_64BIT))
#define HAVE_avx512f_movntv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_avx_movntv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse_movntv4sf (TARGET_SSE)
#define HAVE_avx512f_movntv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_avx_movntv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse2_movntv2df ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_avx512f_movntv8di ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_avx_movntv4di ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_sse2_movntv2di (TARGET_SSE2)
/* Mask-register (k*) logic, shift, test and unpack patterns.  QI/HI mask
   widths come with AVX512F (QI forms of add/shift/test need AVX512DQ);
   SI/DI mask widths need AVX512BW.  (Generated by genflags -- do not
   hand-edit.)  */
#define HAVE_kandqi (TARGET_AVX512F)
#define HAVE_kiorqi (TARGET_AVX512F)
#define HAVE_kxorqi (TARGET_AVX512F)
#define HAVE_kandhi (TARGET_AVX512F)
#define HAVE_kiorhi (TARGET_AVX512F)
#define HAVE_kxorhi (TARGET_AVX512F)
#define HAVE_kandsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kiorsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kxorsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kanddi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kiordi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kxordi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kandnqi (TARGET_AVX512F)
#define HAVE_kandnhi (TARGET_AVX512F)
#define HAVE_kandnsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kandndi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kxnorqi (TARGET_AVX512F)
#define HAVE_kxnorhi (TARGET_AVX512F)
#define HAVE_kxnorsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kxnordi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_knotqi (TARGET_AVX512F)
#define HAVE_knothi (TARGET_AVX512F)
#define HAVE_knotsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_knotdi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kaddqi ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_kaddhi ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_kaddsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kadddi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kashiftqi ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_klshiftrtqi ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_kashifthi (TARGET_AVX512F)
#define HAVE_klshiftrthi (TARGET_AVX512F)
#define HAVE_kashiftsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_klshiftrtsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kashiftdi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_klshiftrtdi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_ktestqi ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_ktesthi ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_ktestsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_ktestdi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kortestqi ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_kortesthi (TARGET_AVX512F)
#define HAVE_kortestsi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kortestdi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_kunpckhi (TARGET_AVX512F)
#define HAVE_kunpcksi (TARGET_AVX512BW)
#define HAVE_kunpckdi (TARGET_AVX512BW)
/* Scalar-in-vector (vm*) add/sub/mul/div; the _round variants additionally
   require AVX512F for the embedded-rounding form.  (Generated by genflags
   -- do not hand-edit.)  */
#define HAVE_sse_vmaddv4sf3 (TARGET_SSE)
#define HAVE_sse_vmaddv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_vmsubv4sf3 (TARGET_SSE)
#define HAVE_sse_vmsubv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse2_vmaddv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmaddv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_sse2_vmsubv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmsubv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_sse_vmmulv4sf3 (TARGET_SSE)
#define HAVE_sse_vmmulv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_vmdivv4sf3 (TARGET_SSE)
#define HAVE_sse_vmdivv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse2_vmmulv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmmulv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_sse2_vmdivv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmdivv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
/* Packed FP division.  The apparent tautologies ("1 && 1", "64 == 64",
   "V16SFmode == V16SFmode") are constant-folded mode/size conditions
   emitted verbatim by genflags when instantiating mode iterators; do not
   simplify them by hand -- they are regenerated from the .md file.  */
#define HAVE_avx512f_divv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_divv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_avx512f_divv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_avx512f_divv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_avx_divv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_avx_divv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sse_divv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_sse_divv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_avx512f_divv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_divv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_avx512f_divv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_avx512f_divv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_avx_divv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_avx_divv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sse2_divv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_sse2_divv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
/* Reciprocal approximation patterns (rcp / rcp14).  (Generated by
   genflags -- do not hand-edit.)  */
#define HAVE_avx_rcpv8sf2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse_rcpv4sf2 (TARGET_SSE)
#define HAVE_sse_vmrcpv4sf2 (TARGET_SSE)
#define HAVE_rcp14v16sf_mask (TARGET_AVX512F)
#define HAVE_rcp14v8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rcp14v4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rcp14v8df_mask (TARGET_AVX512F)
#define HAVE_rcp14v4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rcp14v2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_srcp14v4sf (TARGET_AVX512F)
#define HAVE_srcp14v2df ((TARGET_AVX512F) && (TARGET_SSE2))
/* Square-root and reciprocal-square-root patterns (sqrt / rsqrt /
   rsqrt14); constant-folded iterator conditions are generator output and
   must stay verbatim.  (Generated by genflags -- do not hand-edit.)  */
#define HAVE_avx512f_sqrtv16sf2 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_sqrtv16sf2_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_avx512f_sqrtv16sf2_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_avx512f_sqrtv16sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_avx_sqrtv8sf2 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_avx_sqrtv8sf2_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sse_sqrtv4sf2 (TARGET_SSE && 1 && 1)
#define HAVE_sse_sqrtv4sf2_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_avx512f_sqrtv8df2 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_sqrtv8df2_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_avx512f_sqrtv8df2_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_avx512f_sqrtv8df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_avx_sqrtv4df2 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_avx_sqrtv4df2_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sse2_sqrtv2df2 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_sse2_sqrtv2df2_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_sse_vmsqrtv4sf2 (TARGET_SSE)
#define HAVE_sse_vmsqrtv4sf2_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse2_vmsqrtv2df2 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmsqrtv2df2_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_avx_rsqrtv8sf2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse_rsqrtv4sf2 (TARGET_SSE)
#define HAVE_rsqrt14v16sf_mask (TARGET_AVX512F)
#define HAVE_rsqrt14v8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rsqrt14v4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rsqrt14v8df_mask (TARGET_AVX512F)
#define HAVE_rsqrt14v4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rsqrt14v2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rsqrt14v4sf (TARGET_AVX512F)
#define HAVE_rsqrt14v2df ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_sse_vmrsqrtv4sf2 (TARGET_SSE)
/* IEEE min/max patterns (packed ieee_min/ieee_max plus the scalar
   vm smin/smax forms); same generator-folded iterator conditions as
   above.  (Generated by genflags -- do not hand-edit.)  */
#define HAVE_ieee_maxv16sf3 ((TARGET_SSE \
 && 1 && 1) && (TARGET_AVX512F))
#define HAVE_ieee_maxv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE \
 && 1 && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_ieee_maxv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
 && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_ieee_maxv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE \
 && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_ieee_minv16sf3 ((TARGET_SSE \
 && 1 && 1) && (TARGET_AVX512F))
#define HAVE_ieee_minv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE \
 && 1 && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_ieee_minv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
 && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_ieee_minv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE \
 && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_ieee_maxv8sf3 ((TARGET_SSE \
 && 1 && 1) && (TARGET_AVX))
#define HAVE_ieee_maxv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
 && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_ieee_minv8sf3 ((TARGET_SSE \
 && 1 && 1) && (TARGET_AVX))
#define HAVE_ieee_minv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
 && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_ieee_maxv4sf3 (TARGET_SSE \
 && 1 && 1)
#define HAVE_ieee_maxv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE \
 && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_ieee_minv4sf3 (TARGET_SSE \
 && 1 && 1)
#define HAVE_ieee_minv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE \
 && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_ieee_maxv8df3 ((TARGET_SSE \
 && 1 && 1) && (TARGET_AVX512F))
#define HAVE_ieee_maxv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE \
 && 1 && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_ieee_maxv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
 && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_ieee_maxv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE \
 && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_ieee_minv8df3 ((TARGET_SSE \
 && 1 && 1) && (TARGET_AVX512F))
#define HAVE_ieee_minv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE \
 && 1 && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_ieee_minv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
 && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_ieee_minv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE \
 && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_ieee_maxv4df3 ((TARGET_SSE \
 && 1 && 1) && (TARGET_AVX))
#define HAVE_ieee_maxv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
 && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_ieee_minv4df3 ((TARGET_SSE \
 && 1 && 1) && (TARGET_AVX))
#define HAVE_ieee_minv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
 && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_ieee_maxv2df3 ((TARGET_SSE \
 && 1 && 1) && (TARGET_SSE2))
#define HAVE_ieee_maxv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
 && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_ieee_minv2df3 ((TARGET_SSE \
 && 1 && 1) && (TARGET_SSE2))
#define HAVE_ieee_minv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
 && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_sse_vmsmaxv4sf3 (TARGET_SSE)
#define HAVE_sse_vmsmaxv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_vmsminv4sf3 (TARGET_SSE)
#define HAVE_sse_vmsminv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse2_vmsmaxv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmsmaxv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_sse2_vmsminv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmsminv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
/* addsub, horizontal add/sub (hadd/hsub) and AVX512DQ reduce patterns.
   (Generated by genflags -- do not hand-edit.)  */
#define HAVE_avx_addsubv4df3 (TARGET_AVX)
#define HAVE_sse3_addsubv2df3 (TARGET_SSE3)
#define HAVE_avx_addsubv8sf3 (TARGET_AVX)
#define HAVE_sse3_addsubv4sf3 (TARGET_SSE3)
#define HAVE_avx_haddv4df3 (TARGET_AVX)
#define HAVE_avx_hsubv4df3 (TARGET_AVX)
#define HAVE_sse3_hsubv2df3 (TARGET_SSE3)
#define HAVE_avx_haddv8sf3 (TARGET_AVX)
#define HAVE_avx_hsubv8sf3 (TARGET_AVX)
#define HAVE_sse3_haddv4sf3 (TARGET_SSE3)
#define HAVE_sse3_hsubv4sf3 (TARGET_SSE3)
#define HAVE_reducepv16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_reducepv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_reducepv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_reducepv8df_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_reducepv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_reducepv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_reducesv4sf (TARGET_AVX512DQ)
#define HAVE_reducesv2df ((TARGET_AVX512DQ) && (TARGET_SSE2))
/* Comparison patterns: AVX cmp/vmcmp, SSE mask-producing maskcmp, and the
   AVX512 signed (cmp) / unsigned (ucmp) mask-register compares with their
   _mask and _round variants.  Constant-folded mode comparisons are
   generator output -- keep verbatim.  (Generated by genflags -- do not
   hand-edit.)  */
#define HAVE_avx_cmpv8sf3 (TARGET_AVX)
#define HAVE_avx_cmpv4sf3 (TARGET_AVX)
#define HAVE_avx_cmpv4df3 (TARGET_AVX)
#define HAVE_avx_cmpv2df3 ((TARGET_AVX) && (TARGET_SSE2))
#define HAVE_avx_vmcmpv4sf3 (TARGET_AVX)
#define HAVE_avx_vmcmpv2df3 ((TARGET_AVX) && (TARGET_SSE2))
#define HAVE_avx_maskcmpv8sf3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse_maskcmpv4sf3 (TARGET_SSE)
#define HAVE_avx_maskcmpv4df3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse2_maskcmpv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse_vmmaskcmpv4sf3 (TARGET_SSE)
#define HAVE_sse2_vmmaskcmpv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_avx512f_cmpv16si3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_cmpv16si3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512f_cmpv16si3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SImode == V16SFmode \
 || V16SImode == V8DFmode \
 || V16SImode == V8DImode \
 || V16SImode == V16SImode)))
#define HAVE_avx512f_cmpv16si3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SImode == V16SFmode \
 || V16SImode == V8DFmode \
 || V16SImode == V8DImode \
 || V16SImode == V16SImode))))
#define HAVE_avx512vl_cmpv8si3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv4si3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512f_cmpv8di3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_cmpv8di3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512f_cmpv8di3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DImode == V16SFmode \
 || V8DImode == V8DFmode \
 || V8DImode == V8DImode \
 || V8DImode == V16SImode)))
#define HAVE_avx512f_cmpv8di3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DImode == V16SFmode \
 || V8DImode == V8DFmode \
 || V8DImode == V8DImode \
 || V8DImode == V16SImode))))
#define HAVE_avx512vl_cmpv4di3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv2di3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512f_cmpv16sf3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_cmpv16sf3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512f_cmpv16sf3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_avx512f_cmpv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode))))
#define HAVE_avx512vl_cmpv8sf3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv8sf3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv4sf3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv4sf3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512f_cmpv8df3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_cmpv8df3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512f_cmpv8df3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_avx512f_cmpv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode))))
#define HAVE_avx512vl_cmpv4df3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv4df3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv2df3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv2df3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_cmpv64qi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_cmpv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_cmpv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_cmpv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_cmpv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_cmpv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_ucmpv64qi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_ucmpv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_ucmpv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ucmpv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_ucmpv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_ucmpv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_ucmpv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ucmpv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512f_ucmpv16si3 (TARGET_AVX512F)
#define HAVE_avx512f_ucmpv16si3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_ucmpv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ucmpv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_ucmpv8di3 (TARGET_AVX512F)
#define HAVE_avx512f_ucmpv8di3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_ucmpv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ucmpv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_vmcmpv4sf3 (TARGET_AVX512F)
#define HAVE_avx512f_vmcmpv4sf3_round (TARGET_AVX512F)
#define HAVE_avx512f_vmcmpv2df3 ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_vmcmpv2df3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
#define HAVE_avx512f_vmcmpv4sf3_mask (TARGET_AVX512F)
#define HAVE_avx512f_vmcmpv4sf3_mask_round (TARGET_AVX512F)
#define HAVE_avx512f_vmcmpv2df3_mask ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_vmcmpv2df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
#define HAVE_avx512f_maskcmpv16sf3 (TARGET_AVX512F)
#define HAVE_avx512f_maskcmpv8sf3 ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_avx512f_maskcmpv4sf3 (TARGET_AVX512F)
#define HAVE_avx512f_maskcmpv8df3 (TARGET_AVX512F)
#define HAVE_avx512f_maskcmpv4df3 ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_avx512f_maskcmpv2df3 ((TARGET_AVX512F) && (TARGET_SSE2))
/* Ordered/unordered scalar compares (comi/ucomi); the _round forms need
   AVX512F.  (Generated by genflags -- do not hand-edit.)  */
#define HAVE_sse_comi (SSE_FLOAT_MODE_P (SFmode))
#define HAVE_sse_comi_round ((TARGET_AVX512F) && (SSE_FLOAT_MODE_P (SFmode)))
#define HAVE_sse2_comi (SSE_FLOAT_MODE_P (DFmode))
#define HAVE_sse2_comi_round ((TARGET_AVX512F) && (SSE_FLOAT_MODE_P (DFmode)))
#define HAVE_sse_ucomi (SSE_FLOAT_MODE_P (SFmode))
#define HAVE_sse_ucomi_round ((TARGET_AVX512F) && (SSE_FLOAT_MODE_P (SFmode)))
#define HAVE_sse2_ucomi (SSE_FLOAT_MODE_P (DFmode))
#define HAVE_sse2_ucomi_round ((TARGET_AVX512F) && (SSE_FLOAT_MODE_P (DFmode)))
/* FP andnot patterns and their masked variants.  (Generated by genflags
   -- do not hand-edit.)  */
#define HAVE_avx_andnotv8sf3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_avx_andnotv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_sse_andnotv4sf3 (TARGET_SSE && 1)
#define HAVE_sse_andnotv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && TARGET_AVX512VL))
#define HAVE_avx_andnotv4df3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_avx_andnotv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_sse2_andnotv2df3 ((TARGET_SSE && 1) && (TARGET_SSE2))
#define HAVE_sse2_andnotv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_SSE2)))
#define HAVE_avx512f_andnotv16sf3 (TARGET_AVX512F)
#define HAVE_avx512f_andnotv16sf3_mask (TARGET_AVX512F)
#define HAVE_avx512f_andnotv8df3 (TARGET_AVX512F)
#define HAVE_avx512f_andnotv8df3_mask (TARGET_AVX512F)
1051#define HAVE_fma_fmadd_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
1052#define HAVE_fma_fmadd_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
1053 || V16SFmode == V8DFmode \
1054 || V16SFmode == V8DImode \
1055 || V16SFmode == V16SImode)))
1056#define HAVE_fma_fmadd_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
1057#define HAVE_fma_fmadd_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
/* Availability predicates for AVX-512 fmadd patterns (maskz/mask/mask3,
   rounding forms).  Generated by genflags; constant folds such as
   `64 == 64' and `V8DFmode == V8DFmode' are mode-iterator expansion
   artifacts -- do not hand-edit.  */
#define HAVE_fma_fmadd_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmadd_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_fma_fmadd_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmadd_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmadd_v16sf_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmadd_v16sf_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_avx512vl_fmadd_v8sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v4sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmadd_v8df_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmadd_v8df_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_avx512vl_fmadd_v4df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v2df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmadd_v16sf_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmadd_v16sf_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmadd_v8sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v8sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmadd_v4sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v4sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmadd_v8df_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmadd_v8df_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmadd_v4df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v4df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmadd_v2df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v2df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
/* Availability predicates for AVX-512 fmsub patterns.  Generated by
   genflags from the md mode iterators -- do not hand-edit.  */
#define HAVE_fma_fmsub_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmsub_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_fma_fmsub_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsub_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsub_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmsub_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_fma_fmsub_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsub_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmsub_v16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmsub_v16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsub_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsub_v4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmsub_v8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmsub_v8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsub_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsub_v2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmsub_v16sf_mask3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmsub_v16sf_mask3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_avx512vl_fmsub_v8sf_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v4sf_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmsub_v8df_mask3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmsub_v8df_mask3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_avx512vl_fmsub_v4df_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v2df_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
/* Availability predicates for AVX-512 fnmadd patterns.  Generated by
   genflags -- do not hand-edit.  */
#define HAVE_fma_fnmadd_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fnmadd_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_fma_fnmadd_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmadd_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmadd_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fnmadd_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_fma_fnmadd_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmadd_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmadd_v16sf_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmadd_v16sf_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_avx512vl_fnmadd_v8sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmadd_v4sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmadd_v8df_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmadd_v8df_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_avx512vl_fnmadd_v4df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmadd_v2df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmadd_v16sf_mask3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmadd_v16sf_mask3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_avx512vl_fnmadd_v8sf_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmadd_v4sf_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmadd_v8df_mask3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmadd_v8df_mask3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_avx512vl_fnmadd_v4df_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmadd_v2df_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
/* Availability predicates for AVX-512 fnmsub patterns.  Generated by
   genflags -- do not hand-edit.  */
#define HAVE_fma_fnmsub_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fnmsub_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_fma_fnmsub_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmsub_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmsub_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fnmsub_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_fma_fnmsub_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmsub_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmsub_v16sf_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmsub_v16sf_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_avx512vl_fnmsub_v8sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v4sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmsub_v8df_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmsub_v8df_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_avx512vl_fnmsub_v4df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v2df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmsub_v16sf_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fnmsub_v16sf_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fnmsub_v8sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v8sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fnmsub_v4sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v4sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fnmsub_v8df_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fnmsub_v8df_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fnmsub_v4df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v4df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fnmsub_v2df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v2df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
/* Availability predicates for AVX-512 fmaddsub patterns.  Generated by
   genflags -- do not hand-edit.  */
#define HAVE_fma_fmaddsub_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmaddsub_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_fma_fmaddsub_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmaddsub_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmaddsub_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmaddsub_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_fma_fmaddsub_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmaddsub_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmaddsub_v16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmaddsub_v8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmaddsub_v16sf_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v16sf_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v8sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v8sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v4sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmaddsub_v8df_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v8df_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v4df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v2df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v2df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
/* Availability predicates for AVX-512 fmsubadd patterns.  Generated by
   genflags -- do not hand-edit.  */
#define HAVE_fma_fmsubadd_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmsubadd_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)))
#define HAVE_fma_fmsubadd_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsubadd_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsubadd_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmsubadd_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_fma_fmsubadd_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsubadd_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmsubadd_v16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmsubadd_v16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsubadd_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsubadd_v4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmsubadd_v8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmsubadd_v8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsubadd_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsubadd_v2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmsubadd_v16sf_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmsubadd_v16sf_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsubadd_v8sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v8sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsubadd_v4sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v4sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmsubadd_v8df_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmsubadd_v8df_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsubadd_v4df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v4df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsubadd_v2df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v2df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
/* Availability predicates for SSE scalar conversion patterns
   (cvtpi2ps/cvtss2si families) and AVX-512 cvtusi2ss/sd.  Generated by
   genflags -- do not hand-edit.  */
#define HAVE_sse_cvtpi2ps (TARGET_SSE)
#define HAVE_sse_cvtps2pi (TARGET_SSE)
#define HAVE_sse_cvttps2pi (TARGET_SSE)
#define HAVE_sse_cvtsi2ss (TARGET_SSE)
#define HAVE_sse_cvtsi2ss_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_cvtsi2ssq (TARGET_SSE && TARGET_64BIT)
#define HAVE_sse_cvtsi2ssq_round ((TARGET_AVX512F) && (TARGET_SSE && TARGET_64BIT))
#define HAVE_sse_cvtss2si (TARGET_SSE)
#define HAVE_sse_cvtss2si_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_cvtss2si_2 (TARGET_SSE)
#define HAVE_sse_cvtss2siq (TARGET_SSE && TARGET_64BIT)
#define HAVE_sse_cvtss2siq_round ((TARGET_AVX512F) && (TARGET_SSE && TARGET_64BIT))
#define HAVE_sse_cvtss2siq_2 (TARGET_SSE && TARGET_64BIT)
#define HAVE_sse_cvttss2si (TARGET_SSE)
#define HAVE_sse_cvttss2si_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_cvttss2siq (TARGET_SSE && TARGET_64BIT)
#define HAVE_sse_cvttss2siq_round ((TARGET_AVX512F) && (TARGET_SSE && TARGET_64BIT))
#define HAVE_cvtusi2ss32 (TARGET_AVX512F && 1)
#define HAVE_cvtusi2ss32_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V4SFmode == V4SFmode)))
#define HAVE_cvtusi2sd32 ((TARGET_AVX512F && 1) && (TARGET_SSE2))
#define HAVE_cvtusi2ss64 (TARGET_AVX512F && TARGET_64BIT)
#define HAVE_cvtusi2ss64_round ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_64BIT))
#define HAVE_cvtusi2sd64 ((TARGET_AVX512F && TARGET_64BIT) && (TARGET_SSE2))
#define HAVE_cvtusi2sd64_round ((TARGET_AVX512F) && ((TARGET_AVX512F && TARGET_64BIT) && (TARGET_SSE2)))
/* Availability predicates for vector int->float conversions,
   fix_notrunc patterns and AVX512DQ cvtps2[u]qq.  Generated by
   genflags -- do not hand-edit.  */
#define HAVE_floatv16siv16sf2 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512F))
#define HAVE_floatv16siv16sf2_round ((TARGET_AVX512F) && ((TARGET_SSE2 && 1 && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_floatv16siv16sf2_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_floatv16siv16sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
 || V16SFmode == V8DFmode \
 || V16SFmode == V8DImode \
 || V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_floatv8siv8sf2 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX))
#define HAVE_floatv8siv8sf2_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_floatv4siv4sf2 (TARGET_SSE2 && 1 && 1)
#define HAVE_floatv4siv4sf2_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_ufloatv16siv16sf2 (TARGET_AVX512F)
#define HAVE_ufloatv16siv16sf2_round (TARGET_AVX512F)
#define HAVE_ufloatv16siv16sf2_mask (TARGET_AVX512F)
#define HAVE_ufloatv16siv16sf2_mask_round (TARGET_AVX512F)
#define HAVE_ufloatv8siv8sf2 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ufloatv8siv8sf2_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufloatv8siv8sf2_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufloatv8siv8sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_ufloatv4siv4sf2 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ufloatv4siv4sf2_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufloatv4siv4sf2_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufloatv4siv4sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx_fix_notruncv8sfv8si ((TARGET_SSE2 && 1) && (TARGET_AVX))
#define HAVE_avx_fix_notruncv8sfv8si_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_sse2_fix_notruncv4sfv4si (TARGET_SSE2 && 1)
#define HAVE_sse2_fix_notruncv4sfv4si_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_fix_notruncv16sfv16si (TARGET_AVX512F)
#define HAVE_avx512f_fix_notruncv16sfv16si_round (TARGET_AVX512F)
#define HAVE_avx512f_fix_notruncv16sfv16si_mask (TARGET_AVX512F)
#define HAVE_avx512f_fix_notruncv16sfv16si_mask_round (TARGET_AVX512F)
#define HAVE_avx512f_ufix_notruncv16sfv16si_mask (TARGET_AVX512F)
#define HAVE_avx512f_ufix_notruncv16sfv16si_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_ufix_notruncv8sfv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ufix_notruncv8sfv8si_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512vl_ufix_notruncv4sfv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ufix_notruncv4sfv4si_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512dq_cvtps2qqv8di_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_avx512dq_cvtps2qqv8di_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DImode == V16SFmode \
 || V8DImode == V8DFmode \
 || V8DImode == V8DImode \
 || V8DImode == V16SImode))))
#define HAVE_avx512dq_cvtps2qqv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_cvtps2qqv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && TARGET_AVX512VL))
#define HAVE_avx512dq_cvtps2uqqv8di_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_avx512dq_cvtps2uqqv8di_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DImode == V16SFmode \
 || V8DImode == V8DFmode \
 || V8DImode == V8DImode \
 || V8DImode == V16SImode))))
#define HAVE_avx512dq_cvtps2uqqv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_cvtps2uqqv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && TARGET_AVX512VL))
/* Availability predicates for [u]fix_trunc ps->si patterns and scalar
   SSE2/AVX-512 sd/ss <-> (u)si conversions.  Generated by genflags --
   do not hand-edit.  */
#define HAVE_fix_truncv16sfv16si2 (TARGET_AVX512F)
#define HAVE_fix_truncv16sfv16si2_round (TARGET_AVX512F)
#define HAVE_fix_truncv16sfv16si2_mask (TARGET_AVX512F)
#define HAVE_fix_truncv16sfv16si2_mask_round (TARGET_AVX512F)
#define HAVE_ufix_truncv16sfv16si2 (TARGET_AVX512F)
#define HAVE_ufix_truncv16sfv16si2_round (TARGET_AVX512F)
#define HAVE_ufix_truncv16sfv16si2_mask (TARGET_AVX512F)
#define HAVE_ufix_truncv16sfv16si2_mask_round (TARGET_AVX512F)
#define HAVE_fix_truncv8sfv8si2 (TARGET_AVX && 1)
#define HAVE_fix_truncv8sfv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_fix_truncv4sfv4si2 (TARGET_SSE2 && 1)
#define HAVE_fix_truncv4sfv4si2_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_sse2_cvtpi2pd (TARGET_SSE2)
#define HAVE_sse2_cvtpd2pi (TARGET_SSE2)
#define HAVE_sse2_cvttpd2pi (TARGET_SSE2)
#define HAVE_sse2_cvtsi2sd (TARGET_SSE2)
#define HAVE_sse2_cvtsi2sdq (TARGET_SSE2 && TARGET_64BIT)
#define HAVE_sse2_cvtsi2sdq_round ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_64BIT))
#define HAVE_avx512f_vcvtss2usi (TARGET_AVX512F)
#define HAVE_avx512f_vcvtss2usi_round (TARGET_AVX512F)
#define HAVE_avx512f_vcvtss2usiq (TARGET_AVX512F && TARGET_64BIT)
#define HAVE_avx512f_vcvtss2usiq_round ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_64BIT))
#define HAVE_avx512f_vcvttss2usi (TARGET_AVX512F)
#define HAVE_avx512f_vcvttss2usi_round (TARGET_AVX512F)
#define HAVE_avx512f_vcvttss2usiq (TARGET_AVX512F && TARGET_64BIT)
#define HAVE_avx512f_vcvttss2usiq_round ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_64BIT))
#define HAVE_avx512f_vcvtsd2usi (TARGET_AVX512F)
#define HAVE_avx512f_vcvtsd2usi_round (TARGET_AVX512F)
#define HAVE_avx512f_vcvtsd2usiq (TARGET_AVX512F && TARGET_64BIT)
#define HAVE_avx512f_vcvtsd2usiq_round ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_64BIT))
#define HAVE_avx512f_vcvttsd2usi (TARGET_AVX512F)
#define HAVE_avx512f_vcvttsd2usi_round (TARGET_AVX512F)
#define HAVE_avx512f_vcvttsd2usiq (TARGET_AVX512F && TARGET_64BIT)
#define HAVE_avx512f_vcvttsd2usiq_round ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_64BIT))
#define HAVE_sse2_cvtsd2si (TARGET_SSE2)
#define HAVE_sse2_cvtsd2si_round ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_sse2_cvtsd2si_2 (TARGET_SSE2)
#define HAVE_sse2_cvtsd2siq (TARGET_SSE2 && TARGET_64BIT)
#define HAVE_sse2_cvtsd2siq_round ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_64BIT))
#define HAVE_sse2_cvtsd2siq_2 (TARGET_SSE2 && TARGET_64BIT)
#define HAVE_sse2_cvttsd2si (TARGET_SSE2)
#define HAVE_sse2_cvttsd2si_round ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_sse2_cvttsd2siq (TARGET_SSE2 && TARGET_64BIT)
#define HAVE_sse2_cvttsd2siq_round ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_64BIT))
/* Availability predicates for vector int/long <-> double [u]float
   conversion patterns.  Generated by genflags -- do not hand-edit.  */
#define HAVE_floatv8siv8df2 ((TARGET_AVX && 1) && (TARGET_AVX512F))
#define HAVE_floatv8siv8df2_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_floatv4siv4df2 (TARGET_AVX && 1)
#define HAVE_floatv4siv4df2_mask ((TARGET_AVX512F) && (TARGET_AVX && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_floatv8div8df2 (TARGET_AVX512DQ)
#define HAVE_floatv8div8df2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_floatv8div8df2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_floatv8div8df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ)))
#define HAVE_ufloatv8div8df2 (TARGET_AVX512DQ)
#define HAVE_ufloatv8div8df2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_ufloatv8div8df2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_ufloatv8div8df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ)))
#define HAVE_floatv4div4df2 ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_floatv4div4df2_round ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_floatv4div4df2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_floatv4div4df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL))))
#define HAVE_ufloatv4div4df2 ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_ufloatv4div4df2_round ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_ufloatv4div4df2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_ufloatv4div4df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL))))
#define HAVE_floatv2div2df2 ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_floatv2div2df2_round ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_floatv2div2df2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_floatv2div2df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL))))
#define HAVE_ufloatv2div2df2 ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_ufloatv2div2df2_round ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_ufloatv2div2df2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_ufloatv2div2df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL))))
#define HAVE_floatv8div8sf2 (TARGET_AVX512DQ && 1)
#define HAVE_floatv8div8sf2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode)))
#define HAVE_floatv8div8sf2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_floatv8div8sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode))))
#define HAVE_ufloatv8div8sf2 (TARGET_AVX512DQ && 1)
#define HAVE_ufloatv8div8sf2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode)))
#define HAVE_ufloatv8div8sf2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_ufloatv8div8sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode))))
#define HAVE_floatv4div4sf2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_floatv4div4sf2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_ufloatv4div4sf2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_ufloatv4div4sf2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_floatv2div2sf2_mask (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_ufloatv2div2sf2_mask (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_ufloatv8siv8df2 (TARGET_AVX512F)
#define HAVE_ufloatv8siv8df2_mask (TARGET_AVX512F)
#define HAVE_ufloatv4siv4df2 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ufloatv4siv4df2_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufloatv2siv2df2 (TARGET_AVX512VL)
#define HAVE_ufloatv2siv2df2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
/* Availability predicates for cvtdq2pd/cvtpd2dq and [u]fix[_trunc]
   double->int patterns.  Generated by genflags -- do not hand-edit.  */
#define HAVE_avx512f_cvtdq2pd512_2 (TARGET_AVX512F)
#define HAVE_avx_cvtdq2pd256_2 (TARGET_AVX)
#define HAVE_sse2_cvtdq2pd (TARGET_SSE2 && 1)
#define HAVE_sse2_cvtdq2pd_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_avx512f_cvtpd2dq512 (TARGET_AVX512F)
#define HAVE_avx512f_cvtpd2dq512_round (TARGET_AVX512F)
#define HAVE_avx512f_cvtpd2dq512_mask (TARGET_AVX512F)
#define HAVE_avx512f_cvtpd2dq512_mask_round (TARGET_AVX512F)
#define HAVE_avx_cvtpd2dq256 (TARGET_AVX && 1)
#define HAVE_avx_cvtpd2dq256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_sse2_cvtpd2dq (TARGET_SSE2 && 1)
#define HAVE_sse2_cvtpd2dq_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_ufix_notruncv8dfv8si2 (TARGET_AVX512F)
#define HAVE_ufix_notruncv8dfv8si2_round (TARGET_AVX512F)
#define HAVE_ufix_notruncv8dfv8si2_mask (TARGET_AVX512F)
#define HAVE_ufix_notruncv8dfv8si2_mask_round (TARGET_AVX512F)
#define HAVE_ufix_notruncv4dfv4si2 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ufix_notruncv4dfv4si2_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufix_notruncv4dfv4si2_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufix_notruncv4dfv4si2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_ufix_notruncv2dfv2si2 (TARGET_AVX512VL)
#define HAVE_ufix_notruncv2dfv2si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_fix_truncv8dfv8si2 (TARGET_AVX512F)
#define HAVE_fix_truncv8dfv8si2_round (TARGET_AVX512F)
#define HAVE_fix_truncv8dfv8si2_mask (TARGET_AVX512F)
#define HAVE_fix_truncv8dfv8si2_mask_round (TARGET_AVX512F)
#define HAVE_ufix_truncv8dfv8si2 (TARGET_AVX512F)
#define HAVE_ufix_truncv8dfv8si2_round (TARGET_AVX512F)
#define HAVE_ufix_truncv8dfv8si2_mask (TARGET_AVX512F)
#define HAVE_ufix_truncv8dfv8si2_mask_round (TARGET_AVX512F)
#define HAVE_ufix_truncv2dfv2si2 (TARGET_AVX512VL)
#define HAVE_ufix_truncv2dfv2si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_fix_truncv4dfv4si2 (TARGET_AVX || (TARGET_AVX512VL && TARGET_AVX512F))
#define HAVE_fix_truncv4dfv4si2_mask ((TARGET_AVX512F) && (TARGET_AVX || (TARGET_AVX512VL && TARGET_AVX512F)))
#define HAVE_ufix_truncv4dfv4si2 (TARGET_AVX512VL && TARGET_AVX512F)
#define HAVE_ufix_truncv4dfv4si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL && TARGET_AVX512F))
/* Availability predicates for AVX512DQ [u]fix_trunc and fix_notrunc
   double->long patterns.  Generated by genflags -- do not hand-edit.  */
#define HAVE_fix_truncv8dfv8di2 (TARGET_AVX512DQ && 1)
#define HAVE_fix_truncv8dfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_fix_truncv8dfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_fix_truncv8dfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode))))
#define HAVE_ufix_truncv8dfv8di2 (TARGET_AVX512DQ && 1)
#define HAVE_ufix_truncv8dfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode)))
#define HAVE_ufix_truncv8dfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_ufix_truncv8dfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
 || V8DFmode == V8DFmode \
 || V8DFmode == V8DImode \
 || V8DFmode == V16SImode))))
#define HAVE_fix_truncv4dfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_fix_truncv4dfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_ufix_truncv4dfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_ufix_truncv4dfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_fix_truncv2dfv2di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_fix_truncv2dfv2di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_ufix_truncv2dfv2di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_ufix_truncv2dfv2di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_fix_notruncv8dfv8di2 (TARGET_AVX512DQ && 1)
1524#define HAVE_fix_notruncv8dfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
1525 || V8DFmode == V8DFmode \
1526 || V8DFmode == V8DImode \
1527 || V8DFmode == V16SImode)))
1528#define HAVE_fix_notruncv8dfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
1529#define HAVE_fix_notruncv8dfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
1530 || V8DFmode == V8DFmode \
1531 || V8DFmode == V8DImode \
1532 || V8DFmode == V16SImode))))
1533#define HAVE_fix_notruncv4dfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
1534#define HAVE_fix_notruncv4dfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
1535#define HAVE_fix_notruncv2dfv2di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
1536#define HAVE_fix_notruncv2dfv2di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
1537#define HAVE_ufix_notruncv8dfv8di2 (TARGET_AVX512DQ && 1)
1538#define HAVE_ufix_notruncv8dfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
1539 || V8DFmode == V8DFmode \
1540 || V8DFmode == V8DImode \
1541 || V8DFmode == V16SImode)))
1542#define HAVE_ufix_notruncv8dfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
1543#define HAVE_ufix_notruncv8dfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
1544 || V8DFmode == V8DFmode \
1545 || V8DFmode == V8DImode \
1546 || V8DFmode == V16SImode))))
1547#define HAVE_ufix_notruncv4dfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
1548#define HAVE_ufix_notruncv4dfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
1549#define HAVE_ufix_notruncv2dfv2di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
1550#define HAVE_ufix_notruncv2dfv2di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
1551#define HAVE_fix_truncv8sfv8di2 (TARGET_AVX512DQ && 1)
1552#define HAVE_fix_truncv8sfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode)))
1553#define HAVE_fix_truncv8sfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
1554#define HAVE_fix_truncv8sfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode))))
1555#define HAVE_ufix_truncv8sfv8di2 (TARGET_AVX512DQ && 1)
1556#define HAVE_ufix_truncv8sfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode)))
1557#define HAVE_ufix_truncv8sfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
1558#define HAVE_ufix_truncv8sfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode))))
1559#define HAVE_fix_truncv4sfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
1560#define HAVE_fix_truncv4sfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
1561#define HAVE_ufix_truncv4sfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
1562#define HAVE_ufix_truncv4sfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
1563#define HAVE_fix_truncv2sfv2di2 (TARGET_AVX512DQ && TARGET_AVX512VL)
1564#define HAVE_fix_truncv2sfv2di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && TARGET_AVX512VL))
1565#define HAVE_ufix_truncv2sfv2di2 (TARGET_AVX512DQ && TARGET_AVX512VL)
1566#define HAVE_ufix_truncv2sfv2di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && TARGET_AVX512VL))
1567#define HAVE_ufix_truncv8sfv8si2 (TARGET_AVX512VL)
1568#define HAVE_ufix_truncv8sfv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
1569#define HAVE_ufix_truncv4sfv4si2 (TARGET_AVX512VL)
1570#define HAVE_ufix_truncv4sfv4si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
1571#define HAVE_sse2_cvttpd2dq (TARGET_SSE2 && 1)
1572#define HAVE_sse2_cvttpd2dq_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
1573#define HAVE_sse2_cvtsd2ss (TARGET_SSE2)
1574#define HAVE_sse2_cvtsd2ss_round ((TARGET_AVX512F) && (TARGET_SSE2))
1575#define HAVE_sse2_cvtss2sd (TARGET_SSE2)
1576#define HAVE_sse2_cvtss2sd_round ((TARGET_AVX512F) && (TARGET_SSE2))
1577#define HAVE_avx512f_cvtpd2ps512_mask (TARGET_AVX512F)
1578#define HAVE_avx512f_cvtpd2ps512_mask_round (TARGET_AVX512F)
1579#define HAVE_avx_cvtpd2ps256 (TARGET_AVX && 1)
1580#define HAVE_avx_cvtpd2ps256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
1581#define HAVE_avx512f_cvtps2pd512 ((TARGET_AVX && 1 && 1) && (TARGET_AVX512F))
1582#define HAVE_avx512f_cvtps2pd512_round ((TARGET_AVX512F) && ((TARGET_AVX && 1 && (V8DFmode == V16SFmode \
1583 || V8DFmode == V8DFmode \
1584 || V8DFmode == V8DImode \
1585 || V8DFmode == V16SImode)) && (TARGET_AVX512F)))
1586#define HAVE_avx512f_cvtps2pd512_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
1587#define HAVE_avx512f_cvtps2pd512_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
1588 || V8DFmode == V8DFmode \
1589 || V8DFmode == V8DImode \
1590 || V8DFmode == V16SImode)) && (TARGET_AVX512F))))
1591#define HAVE_avx_cvtps2pd256 (TARGET_AVX && 1 && 1)
1592#define HAVE_avx_cvtps2pd256_mask ((TARGET_AVX512F) && (TARGET_AVX && (32 == 64 || TARGET_AVX512VL) && 1))
1593#define HAVE_vec_unpacks_lo_v16sf (TARGET_AVX512F)
1594#define HAVE_avx512bw_cvtb2maskv64qi (TARGET_AVX512BW)
1595#define HAVE_avx512vl_cvtb2maskv16qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
1596#define HAVE_avx512vl_cvtb2maskv32qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
1597#define HAVE_avx512bw_cvtw2maskv32hi (TARGET_AVX512BW)
1598#define HAVE_avx512vl_cvtw2maskv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
1599#define HAVE_avx512vl_cvtw2maskv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
1600#define HAVE_avx512f_cvtd2maskv16si (TARGET_AVX512DQ)
1601#define HAVE_avx512vl_cvtd2maskv8si ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
1602#define HAVE_avx512vl_cvtd2maskv4si ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
1603#define HAVE_avx512f_cvtq2maskv8di (TARGET_AVX512DQ)
1604#define HAVE_avx512vl_cvtq2maskv4di ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
1605#define HAVE_avx512vl_cvtq2maskv2di ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
1606#define HAVE_sse2_cvtps2pd (TARGET_SSE2 && 1)
1607#define HAVE_sse2_cvtps2pd_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
1608#define HAVE_sse_movhlps (TARGET_SSE && !(MEM_P (operands[1]) && MEM_P (operands[2])))
1609#define HAVE_sse_movlhps (TARGET_SSE && ix86_binary_operator_ok (UNKNOWN, V4SFmode, operands))
1610#define HAVE_avx512f_unpckhps512_mask (TARGET_AVX512F)
1611#define HAVE_avx_unpckhps256 (TARGET_AVX && 1)
1612#define HAVE_avx_unpckhps256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
1613#define HAVE_vec_interleave_highv4sf (TARGET_SSE && 1)
1614#define HAVE_vec_interleave_highv4sf_mask ((TARGET_AVX512F) && (TARGET_SSE && TARGET_AVX512VL))
1615#define HAVE_avx512f_unpcklps512_mask (TARGET_AVX512F)
1616#define HAVE_avx_unpcklps256 (TARGET_AVX && 1)
1617#define HAVE_avx_unpcklps256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
1618#define HAVE_unpcklps128_mask (TARGET_AVX512VL)
1619#define HAVE_vec_interleave_lowv4sf (TARGET_SSE)
1620#define HAVE_avx_movshdup256 (TARGET_AVX && 1)
1621#define HAVE_avx_movshdup256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
1622#define HAVE_sse3_movshdup (TARGET_SSE3 && 1)
1623#define HAVE_sse3_movshdup_mask ((TARGET_AVX512F) && (TARGET_SSE3 && TARGET_AVX512VL))
1624#define HAVE_avx512f_movshdup512_mask (TARGET_AVX512F)
1625#define HAVE_avx_movsldup256 (TARGET_AVX && 1)
1626#define HAVE_avx_movsldup256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
1627#define HAVE_sse3_movsldup (TARGET_SSE3 && 1)
1628#define HAVE_sse3_movsldup_mask ((TARGET_AVX512F) && (TARGET_SSE3 && TARGET_AVX512VL))
1629#define HAVE_avx512f_movsldup512_mask (TARGET_AVX512F)
1630#define HAVE_avx_shufps256_1 (TARGET_AVX \
1631 && 1 \
1632 && (INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
1633 && INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
1634 && INTVAL (operands[5]) == (INTVAL (operands[9]) - 4) \
1635 && INTVAL (operands[6]) == (INTVAL (operands[10]) - 4)))
1636#define HAVE_avx_shufps256_1_mask ((TARGET_AVX512F) && (TARGET_AVX \
1637 && TARGET_AVX512VL \
1638 && (INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
1639 && INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
1640 && INTVAL (operands[5]) == (INTVAL (operands[9]) - 4) \
1641 && INTVAL (operands[6]) == (INTVAL (operands[10]) - 4))))
1642#define HAVE_sse_shufps_v4sf_mask (TARGET_AVX512VL)
1643#define HAVE_sse_shufps_v4si (TARGET_SSE)
1644#define HAVE_sse_shufps_v4sf (TARGET_SSE)
1645#define HAVE_sse_storehps (TARGET_SSE && !(MEM_P (operands[0]) && MEM_P (operands[1])))
1646#define HAVE_sse_loadhps (TARGET_SSE)
1647#define HAVE_sse_storelps (TARGET_SSE && !(MEM_P (operands[0]) && MEM_P (operands[1])))
1648#define HAVE_sse_loadlps (TARGET_SSE)
1649#define HAVE_sse_movss (TARGET_SSE)
1650#define HAVE_avx2_vec_dupv8sf ((TARGET_AVX2) && (TARGET_AVX))
1651#define HAVE_avx2_vec_dupv4sf (TARGET_AVX2)
1652#define HAVE_avx2_vec_dupv8sf_1 (TARGET_AVX2)
1653#define HAVE_avx512f_vec_dupv16sf_1 (TARGET_AVX512F)
1654#define HAVE_avx512f_vec_dupv8df_1 (TARGET_AVX512F)
1655#define HAVE_vec_setv4si_0 (TARGET_SSE)
1656#define HAVE_vec_setv4sf_0 (TARGET_SSE)
1657#define HAVE_sse4_1_insertps (TARGET_SSE4_1)
1658#define HAVE_avx512dq_vextractf64x2_1_maskm (TARGET_AVX512DQ \
1659 && INTVAL (operands[2]) % 2 == 0 \
1660 && INTVAL (operands[2]) == INTVAL (operands[3]) - 1 \
1661 && rtx_equal_p (operands[4], operands[0]))
1662#define HAVE_avx512dq_vextracti64x2_1_maskm (TARGET_AVX512DQ \
1663 && INTVAL (operands[2]) % 2 == 0 \
1664 && INTVAL (operands[2]) == INTVAL (operands[3]) - 1 \
1665 && rtx_equal_p (operands[4], operands[0]))
1666#define HAVE_avx512f_vextractf32x4_1_maskm (TARGET_AVX512F \
1667 && INTVAL (operands[2]) % 4 == 0 \
1668 && INTVAL (operands[2]) == INTVAL (operands[3]) - 1 \
1669 && INTVAL (operands[3]) == INTVAL (operands[4]) - 1 \
1670 && INTVAL (operands[4]) == INTVAL (operands[5]) - 1 \
1671 && rtx_equal_p (operands[6], operands[0]))
1672#define HAVE_avx512f_vextracti32x4_1_maskm (TARGET_AVX512F \
1673 && INTVAL (operands[2]) % 4 == 0 \
1674 && INTVAL (operands[2]) == INTVAL (operands[3]) - 1 \
1675 && INTVAL (operands[3]) == INTVAL (operands[4]) - 1 \
1676 && INTVAL (operands[4]) == INTVAL (operands[5]) - 1 \
1677 && rtx_equal_p (operands[6], operands[0]))
1678#define HAVE_avx512dq_vextractf64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ \
1679 && INTVAL (operands[2]) % 2 == 0 \
1680 && INTVAL (operands[2]) == INTVAL (operands[3]) - 1))
1681#define HAVE_avx512dq_vextracti64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ \
1682 && INTVAL (operands[2]) % 2 == 0 \
1683 && INTVAL (operands[2]) == INTVAL (operands[3]) - 1))
1684#define HAVE_avx512f_vextractf32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
1685 && INTVAL (operands[2]) % 4 == 0 \
1686 && INTVAL (operands[2]) == INTVAL (operands[3]) - 1 \
1687 && INTVAL (operands[3]) == INTVAL (operands[4]) - 1 \
1688 && INTVAL (operands[4]) == INTVAL (operands[5]) - 1))
1689#define HAVE_avx512f_vextracti32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
1690 && INTVAL (operands[2]) % 4 == 0 \
1691 && INTVAL (operands[2]) == INTVAL (operands[3]) - 1 \
1692 && INTVAL (operands[3]) == INTVAL (operands[4]) - 1 \
1693 && INTVAL (operands[4]) == INTVAL (operands[5]) - 1))
1694#define HAVE_vec_extract_lo_v8df_maskm (TARGET_AVX512F \
1695 && rtx_equal_p (operands[2], operands[0]))
1696#define HAVE_vec_extract_lo_v8di_maskm (TARGET_AVX512F \
1697 && rtx_equal_p (operands[2], operands[0]))
1698#define HAVE_vec_extract_lo_v8df (TARGET_AVX512F \
1699 && (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
1700#define HAVE_vec_extract_lo_v8df_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
1701 && (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
1702#define HAVE_vec_extract_lo_v8di (TARGET_AVX512F \
1703 && (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
1704#define HAVE_vec_extract_lo_v8di_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
1705 && (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
1706#define HAVE_vec_extract_hi_v8df_maskm (TARGET_AVX512F \
1707 && rtx_equal_p (operands[2], operands[0]))
1708#define HAVE_vec_extract_hi_v8di_maskm (TARGET_AVX512F \
1709 && rtx_equal_p (operands[2], operands[0]))
1710#define HAVE_vec_extract_hi_v8df (TARGET_AVX512F)
1711#define HAVE_vec_extract_hi_v8df_mask (TARGET_AVX512F)
1712#define HAVE_vec_extract_hi_v8di (TARGET_AVX512F)
1713#define HAVE_vec_extract_hi_v8di_mask (TARGET_AVX512F)
1714#define HAVE_vec_extract_hi_v16sf_maskm (TARGET_AVX512DQ \
1715 && rtx_equal_p (operands[2], operands[0]))
1716#define HAVE_vec_extract_hi_v16si_maskm (TARGET_AVX512DQ \
1717 && rtx_equal_p (operands[2], operands[0]))
1718#define HAVE_vec_extract_hi_v16sf (TARGET_AVX512F && 1)
1719#define HAVE_vec_extract_hi_v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_AVX512DQ))
1720#define HAVE_vec_extract_hi_v16si (TARGET_AVX512F && 1)
1721#define HAVE_vec_extract_hi_v16si_mask ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_AVX512DQ))
1722#define HAVE_vec_extract_lo_v16sf (TARGET_AVX512F \
1723 && 1 \
1724 && (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
1725#define HAVE_vec_extract_lo_v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
1726 && (64 == 64 || TARGET_AVX512VL) \
1727 && (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
1728#define HAVE_vec_extract_lo_v16si (TARGET_AVX512F \
1729 && 1 \
1730 && (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
1731#define HAVE_vec_extract_lo_v16si_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
1732 && (64 == 64 || TARGET_AVX512VL) \
1733 && (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
1734#define HAVE_vec_extract_lo_v4di (TARGET_AVX \
1735 && 1 && 1 \
1736 && (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
1737#define HAVE_vec_extract_lo_v4di_mask ((TARGET_AVX512F) && (TARGET_AVX \
1738 && TARGET_AVX512VL && TARGET_AVX512DQ \
1739 && (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
1740#define HAVE_vec_extract_lo_v4df (TARGET_AVX \
1741 && 1 && 1 \
1742 && (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
1743#define HAVE_vec_extract_lo_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX \
1744 && TARGET_AVX512VL && TARGET_AVX512DQ \
1745 && (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
1746#define HAVE_vec_extract_hi_v4di (TARGET_AVX && 1 && 1)
1747#define HAVE_vec_extract_hi_v4di_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL && TARGET_AVX512DQ))
1748#define HAVE_vec_extract_hi_v4df (TARGET_AVX && 1 && 1)
1749#define HAVE_vec_extract_hi_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL && TARGET_AVX512DQ))
1750#define HAVE_vec_extract_lo_v8si (TARGET_AVX \
1751 && 1 && 1 \
1752 && (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
1753#define HAVE_vec_extract_lo_v8si_mask ((TARGET_AVX512F) && (TARGET_AVX \
1754 && TARGET_AVX512VL && TARGET_AVX512DQ \
1755 && (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
1756#define HAVE_vec_extract_lo_v8sf (TARGET_AVX \
1757 && 1 && 1 \
1758 && (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
1759#define HAVE_vec_extract_lo_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX \
1760 && TARGET_AVX512VL && TARGET_AVX512DQ \
1761 && (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
1762#define HAVE_vec_extract_lo_v8si_maskm (TARGET_AVX512VL && TARGET_AVX512F \
1763 && rtx_equal_p (operands[2], operands[0]))
1764#define HAVE_vec_extract_lo_v8sf_maskm (TARGET_AVX512VL && TARGET_AVX512F \
1765 && rtx_equal_p (operands[2], operands[0]))
1766#define HAVE_vec_extract_hi_v8si_maskm (TARGET_AVX512F && TARGET_AVX512VL \
1767 && rtx_equal_p (operands[2], operands[0]))
1768#define HAVE_vec_extract_hi_v8sf_maskm (TARGET_AVX512F && TARGET_AVX512VL \
1769 && rtx_equal_p (operands[2], operands[0]))
1770#define HAVE_vec_extract_hi_v8si_mask (TARGET_AVX512VL)
1771#define HAVE_vec_extract_hi_v8sf_mask (TARGET_AVX512VL)
1772#define HAVE_vec_extract_hi_v8si (TARGET_AVX)
1773#define HAVE_vec_extract_hi_v8sf (TARGET_AVX)
1774#define HAVE_vec_extract_lo_v32hi (TARGET_AVX512F && !(MEM_P (operands[0]) && MEM_P (operands[1])))
1775#define HAVE_vec_extract_hi_v32hi (TARGET_AVX512F)
1776#define HAVE_vec_extract_lo_v16hi (TARGET_AVX && !(MEM_P (operands[0]) && MEM_P (operands[1])))
1777#define HAVE_vec_extract_hi_v16hi (TARGET_AVX)
1778#define HAVE_vec_extract_lo_v64qi (TARGET_AVX512F && !(MEM_P (operands[0]) && MEM_P (operands[1])))
1779#define HAVE_vec_extract_hi_v64qi (TARGET_AVX512F)
1780#define HAVE_vec_extract_lo_v32qi (TARGET_AVX && !(MEM_P (operands[0]) && MEM_P (operands[1])))
1781#define HAVE_vec_extract_hi_v32qi (TARGET_AVX)
1782#define HAVE_avx512f_unpckhpd512_mask (TARGET_AVX512F)
1783#define HAVE_avx_unpckhpd256 (TARGET_AVX && 1)
1784#define HAVE_avx_unpckhpd256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
1785#define HAVE_avx512vl_unpckhpd128_mask (TARGET_AVX512VL)
1786#define HAVE_avx512vl_unpcklpd128_mask (TARGET_AVX512VL)
1787#define HAVE_avx512f_vmscalefv4sf (TARGET_AVX512F)
1788#define HAVE_avx512f_vmscalefv4sf_round (TARGET_AVX512F)
1789#define HAVE_avx512f_vmscalefv2df ((TARGET_AVX512F) && (TARGET_SSE2))
1790#define HAVE_avx512f_vmscalefv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
1791#define HAVE_avx512f_scalefv16sf (TARGET_AVX512F)
1792#define HAVE_avx512f_scalefv16sf_round (TARGET_AVX512F)
1793#define HAVE_avx512f_scalefv16sf_mask (TARGET_AVX512F)
1794#define HAVE_avx512f_scalefv16sf_mask_round (TARGET_AVX512F)
1795#define HAVE_avx512vl_scalefv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
1796#define HAVE_avx512vl_scalefv8sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1797#define HAVE_avx512vl_scalefv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1798#define HAVE_avx512vl_scalefv8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1799#define HAVE_avx512vl_scalefv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
1800#define HAVE_avx512vl_scalefv4sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1801#define HAVE_avx512vl_scalefv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1802#define HAVE_avx512vl_scalefv4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1803#define HAVE_avx512f_scalefv8df (TARGET_AVX512F)
1804#define HAVE_avx512f_scalefv8df_round (TARGET_AVX512F)
1805#define HAVE_avx512f_scalefv8df_mask (TARGET_AVX512F)
1806#define HAVE_avx512f_scalefv8df_mask_round (TARGET_AVX512F)
1807#define HAVE_avx512vl_scalefv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
1808#define HAVE_avx512vl_scalefv4df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1809#define HAVE_avx512vl_scalefv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1810#define HAVE_avx512vl_scalefv4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1811#define HAVE_avx512vl_scalefv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
1812#define HAVE_avx512vl_scalefv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1813#define HAVE_avx512vl_scalefv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1814#define HAVE_avx512vl_scalefv2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1815#define HAVE_avx512f_vternlogv16si (TARGET_AVX512F)
1816#define HAVE_avx512f_vternlogv16si_maskz_1 (TARGET_AVX512F)
1817#define HAVE_avx512vl_vternlogv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
1818#define HAVE_avx512vl_vternlogv8si_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
1819#define HAVE_avx512vl_vternlogv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
1820#define HAVE_avx512vl_vternlogv4si_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
1821#define HAVE_avx512f_vternlogv8di (TARGET_AVX512F)
1822#define HAVE_avx512f_vternlogv8di_maskz_1 (TARGET_AVX512F)
1823#define HAVE_avx512vl_vternlogv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
1824#define HAVE_avx512vl_vternlogv4di_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
1825#define HAVE_avx512vl_vternlogv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
1826#define HAVE_avx512vl_vternlogv2di_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
1827#define HAVE_avx512f_vternlogv16si_mask (TARGET_AVX512F)
1828#define HAVE_avx512vl_vternlogv8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
1829#define HAVE_avx512vl_vternlogv4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
1830#define HAVE_avx512f_vternlogv8di_mask (TARGET_AVX512F)
1831#define HAVE_avx512vl_vternlogv4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
1832#define HAVE_avx512vl_vternlogv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
1833#define HAVE_avx512f_getexpv16sf (TARGET_AVX512F)
1834#define HAVE_avx512f_getexpv16sf_round (TARGET_AVX512F)
1835#define HAVE_avx512f_getexpv16sf_mask (TARGET_AVX512F)
1836#define HAVE_avx512f_getexpv16sf_mask_round (TARGET_AVX512F)
1837#define HAVE_avx512vl_getexpv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
1838#define HAVE_avx512vl_getexpv8sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1839#define HAVE_avx512vl_getexpv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1840#define HAVE_avx512vl_getexpv8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1841#define HAVE_avx512vl_getexpv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
1842#define HAVE_avx512vl_getexpv4sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1843#define HAVE_avx512vl_getexpv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1844#define HAVE_avx512vl_getexpv4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1845#define HAVE_avx512f_getexpv8df (TARGET_AVX512F)
1846#define HAVE_avx512f_getexpv8df_round (TARGET_AVX512F)
1847#define HAVE_avx512f_getexpv8df_mask (TARGET_AVX512F)
1848#define HAVE_avx512f_getexpv8df_mask_round (TARGET_AVX512F)
1849#define HAVE_avx512vl_getexpv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
1850#define HAVE_avx512vl_getexpv4df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1851#define HAVE_avx512vl_getexpv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1852#define HAVE_avx512vl_getexpv4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1853#define HAVE_avx512vl_getexpv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
1854#define HAVE_avx512vl_getexpv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1855#define HAVE_avx512vl_getexpv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1856#define HAVE_avx512vl_getexpv2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1857#define HAVE_avx512f_sgetexpv4sf (TARGET_AVX512F)
1858#define HAVE_avx512f_sgetexpv4sf_round (TARGET_AVX512F)
1859#define HAVE_avx512f_sgetexpv2df ((TARGET_AVX512F) && (TARGET_SSE2))
1860#define HAVE_avx512f_sgetexpv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
1861#define HAVE_avx512f_alignv16si_mask (TARGET_AVX512F)
1862#define HAVE_avx512vl_alignv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1863#define HAVE_avx512vl_alignv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1864#define HAVE_avx512f_alignv8di_mask (TARGET_AVX512F)
1865#define HAVE_avx512vl_alignv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1866#define HAVE_avx512vl_alignv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1867#define HAVE_avx512f_fixupimmv16sf (TARGET_AVX512F)
1868#define HAVE_avx512f_fixupimmv16sf_round (TARGET_AVX512F)
1869#define HAVE_avx512f_fixupimmv16sf_maskz_1 (TARGET_AVX512F)
1870#define HAVE_avx512f_fixupimmv16sf_maskz_1_round (TARGET_AVX512F)
1871#define HAVE_avx512vl_fixupimmv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
1872#define HAVE_avx512vl_fixupimmv8sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1873#define HAVE_avx512vl_fixupimmv8sf_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
1874#define HAVE_avx512vl_fixupimmv8sf_maskz_1_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1875#define HAVE_avx512vl_fixupimmv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
1876#define HAVE_avx512vl_fixupimmv4sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1877#define HAVE_avx512vl_fixupimmv4sf_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
1878#define HAVE_avx512vl_fixupimmv4sf_maskz_1_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1879#define HAVE_avx512f_fixupimmv8df (TARGET_AVX512F)
1880#define HAVE_avx512f_fixupimmv8df_round (TARGET_AVX512F)
1881#define HAVE_avx512f_fixupimmv8df_maskz_1 (TARGET_AVX512F)
1882#define HAVE_avx512f_fixupimmv8df_maskz_1_round (TARGET_AVX512F)
1883#define HAVE_avx512vl_fixupimmv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
1884#define HAVE_avx512vl_fixupimmv4df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1885#define HAVE_avx512vl_fixupimmv4df_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
1886#define HAVE_avx512vl_fixupimmv4df_maskz_1_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1887#define HAVE_avx512vl_fixupimmv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
1888#define HAVE_avx512vl_fixupimmv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1889#define HAVE_avx512vl_fixupimmv2df_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
1890#define HAVE_avx512vl_fixupimmv2df_maskz_1_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1891#define HAVE_avx512f_fixupimmv16sf_mask (TARGET_AVX512F)
1892#define HAVE_avx512f_fixupimmv16sf_mask_round (TARGET_AVX512F)
1893#define HAVE_avx512vl_fixupimmv8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
1894#define HAVE_avx512vl_fixupimmv8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1895#define HAVE_avx512vl_fixupimmv4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
1896#define HAVE_avx512vl_fixupimmv4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1897#define HAVE_avx512f_fixupimmv8df_mask (TARGET_AVX512F)
1898#define HAVE_avx512f_fixupimmv8df_mask_round (TARGET_AVX512F)
1899#define HAVE_avx512vl_fixupimmv4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
1900#define HAVE_avx512vl_fixupimmv4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1901#define HAVE_avx512vl_fixupimmv2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
1902#define HAVE_avx512vl_fixupimmv2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1903#define HAVE_avx512f_sfixupimmv4sf (TARGET_AVX512F)
1904#define HAVE_avx512f_sfixupimmv4sf_round (TARGET_AVX512F)
1905#define HAVE_avx512f_sfixupimmv4sf_maskz_1 (TARGET_AVX512F)
1906#define HAVE_avx512f_sfixupimmv4sf_maskz_1_round (TARGET_AVX512F)
1907#define HAVE_avx512f_sfixupimmv2df ((TARGET_AVX512F) && (TARGET_SSE2))
1908#define HAVE_avx512f_sfixupimmv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
1909#define HAVE_avx512f_sfixupimmv2df_maskz_1 ((TARGET_AVX512F) && (TARGET_SSE2))
1910#define HAVE_avx512f_sfixupimmv2df_maskz_1_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
1911#define HAVE_avx512f_sfixupimmv4sf_mask (TARGET_AVX512F)
1912#define HAVE_avx512f_sfixupimmv4sf_mask_round (TARGET_AVX512F)
1913#define HAVE_avx512f_sfixupimmv2df_mask ((TARGET_AVX512F) && (TARGET_SSE2))
1914#define HAVE_avx512f_sfixupimmv2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
1915#define HAVE_avx512f_rndscalev16sf (TARGET_AVX512F)
1916#define HAVE_avx512f_rndscalev16sf_round (TARGET_AVX512F)
1917#define HAVE_avx512f_rndscalev16sf_mask (TARGET_AVX512F)
/* Round-scale (rndscale) pattern availability.  The base patterns need
   TARGET_AVX512F; the v8sf/v4sf/v4df/v2df "vl" variants additionally need
   TARGET_AVX512VL.  The repeated (TARGET_AVX512F) conjuncts in the _round/
   _mask/_mask_round variants are composition artifacts of the genflags
   generator (base condition AND'ed with each variant's condition) and are
   semantically redundant but harmless.  */
1918#define HAVE_avx512f_rndscalev16sf_mask_round (TARGET_AVX512F)
1919#define HAVE_avx512vl_rndscalev8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
1920#define HAVE_avx512vl_rndscalev8sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1921#define HAVE_avx512vl_rndscalev8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1922#define HAVE_avx512vl_rndscalev8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1923#define HAVE_avx512vl_rndscalev4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
1924#define HAVE_avx512vl_rndscalev4sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1925#define HAVE_avx512vl_rndscalev4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1926#define HAVE_avx512vl_rndscalev4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1927#define HAVE_avx512f_rndscalev8df (TARGET_AVX512F)
1928#define HAVE_avx512f_rndscalev8df_round (TARGET_AVX512F)
1929#define HAVE_avx512f_rndscalev8df_mask (TARGET_AVX512F)
1930#define HAVE_avx512f_rndscalev8df_mask_round (TARGET_AVX512F)
1931#define HAVE_avx512vl_rndscalev4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
1932#define HAVE_avx512vl_rndscalev4df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1933#define HAVE_avx512vl_rndscalev4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1934#define HAVE_avx512vl_rndscalev4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1935#define HAVE_avx512vl_rndscalev2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
1936#define HAVE_avx512vl_rndscalev2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1937#define HAVE_avx512vl_rndscalev2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
1938#define HAVE_avx512vl_rndscalev2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
1939#define HAVE_avx512f_rndscalev4sf (TARGET_AVX512F)
1940#define HAVE_avx512f_rndscalev4sf_round (TARGET_AVX512F)
1941#define HAVE_avx512f_rndscalev2df ((TARGET_AVX512F) && (TARGET_SSE2))
1942#define HAVE_avx512f_rndscalev2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
/* 512-bit SHUFPS patterns.  Beyond TARGET_AVX512F, the pattern requires
   that the per-lane selectors repeat across all four 128-bit lanes: the
   lane-1/2/3 selectors (operands 7-10, 11-14, 15-18) must equal the lane-0
   selectors (operands 3-6) offset by 4, 8 and 12 respectively.  The _mask
   variant repeats the whole condition wrapped in another (TARGET_AVX512F)
   conjunct — a generator artifact.  */
1943#define HAVE_avx512f_shufps512_1 (TARGET_AVX512F \
1944 && (INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
1945 && INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
1946 && INTVAL (operands[5]) == (INTVAL (operands[9]) - 4) \
1947 && INTVAL (operands[6]) == (INTVAL (operands[10]) - 4) \
1948 && INTVAL (operands[3]) == (INTVAL (operands[11]) - 8) \
1949 && INTVAL (operands[4]) == (INTVAL (operands[12]) - 8) \
1950 && INTVAL (operands[5]) == (INTVAL (operands[13]) - 8) \
1951 && INTVAL (operands[6]) == (INTVAL (operands[14]) - 8) \
1952 && INTVAL (operands[3]) == (INTVAL (operands[15]) - 12) \
1953 && INTVAL (operands[4]) == (INTVAL (operands[16]) - 12) \
1954 && INTVAL (operands[5]) == (INTVAL (operands[17]) - 12) \
1955 && INTVAL (operands[6]) == (INTVAL (operands[18]) - 12)))
1956#define HAVE_avx512f_shufps512_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
1957 && (INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
1958 && INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
1959 && INTVAL (operands[5]) == (INTVAL (operands[9]) - 4) \
1960 && INTVAL (operands[6]) == (INTVAL (operands[10]) - 4) \
1961 && INTVAL (operands[3]) == (INTVAL (operands[11]) - 8) \
1962 && INTVAL (operands[4]) == (INTVAL (operands[12]) - 8) \
1963 && INTVAL (operands[5]) == (INTVAL (operands[13]) - 8) \
1964 && INTVAL (operands[6]) == (INTVAL (operands[14]) - 8) \
1965 && INTVAL (operands[3]) == (INTVAL (operands[15]) - 12) \
1966 && INTVAL (operands[4]) == (INTVAL (operands[16]) - 12) \
1967 && INTVAL (operands[5]) == (INTVAL (operands[17]) - 12) \
1968 && INTVAL (operands[6]) == (INTVAL (operands[18]) - 12))))
/* SHUFPD, DI-element interleave (unpckh/unpckl), and DF load/store/move
   patterns.  The `&& 1' terms are degenerate conjuncts left by the
   generator's condition substitution.  The store/load patterns forbid
   memory-to-memory forms via the MEM_P checks; vec_concatv2df additionally
   allows the both-memory case only for SSE3 movddup (equal operands).  */
1969#define HAVE_avx512f_shufpd512_1 (TARGET_AVX512F)
1970#define HAVE_avx512f_shufpd512_1_mask (TARGET_AVX512F)
1971#define HAVE_avx_shufpd256_1 (TARGET_AVX && 1)
1972#define HAVE_avx_shufpd256_1_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
1973#define HAVE_sse2_shufpd_v2df_mask (TARGET_AVX512VL)
1974#define HAVE_avx2_interleave_highv4di (TARGET_AVX2 && 1)
1975#define HAVE_avx2_interleave_highv4di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
1976#define HAVE_avx512f_interleave_highv8di_mask (TARGET_AVX512F)
1977#define HAVE_vec_interleave_highv2di (TARGET_SSE2 && 1)
1978#define HAVE_vec_interleave_highv2di_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
1979#define HAVE_avx2_interleave_lowv4di (TARGET_AVX2 && 1)
1980#define HAVE_avx2_interleave_lowv4di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
1981#define HAVE_avx512f_interleave_lowv8di_mask (TARGET_AVX512F)
1982#define HAVE_vec_interleave_lowv2di (TARGET_SSE2 && 1)
1983#define HAVE_vec_interleave_lowv2di_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
1984#define HAVE_sse2_shufpd_v2di (TARGET_SSE2)
1985#define HAVE_sse2_shufpd_v2df (TARGET_SSE2)
1986#define HAVE_sse2_storehpd (TARGET_SSE2 && !(MEM_P (operands[0]) && MEM_P (operands[1])))
1987#define HAVE_sse2_storelpd (TARGET_SSE2 && !(MEM_P (operands[0]) && MEM_P (operands[1])))
1988#define HAVE_sse2_loadhpd (TARGET_SSE2 && !(MEM_P (operands[1]) && MEM_P (operands[2])))
1989#define HAVE_sse2_loadlpd (TARGET_SSE2 && !(MEM_P (operands[1]) && MEM_P (operands[2])))
1990#define HAVE_sse2_movsd (TARGET_SSE2)
1991#define HAVE_vec_dupv2df (TARGET_SSE2 && 1)
1992#define HAVE_vec_dupv2df_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
1993#define HAVE_vec_concatv2df (TARGET_SSE \
1994 && (!(MEM_P (operands[1]) && MEM_P (operands[2])) \
1995 || (TARGET_SSE3 && rtx_equal_p (operands[1], operands[2]))))
/* Narrowing vector truncation patterns: plain truncate, signed-saturating
   (ss_) and unsigned-saturating (us_) variants, each with masked (_mask)
   and masked-store (_mask_store) forms.  Guarded by AVX512F for 512-bit
   sources, AVX512BW for HI/QI element widths, AVX512VL for the 128/256-bit
   ("vl") variants, as the names suggest.  */
1996#define HAVE_avx512f_ss_truncatev16siv16qi2_mask (TARGET_AVX512F)
1997#define HAVE_avx512f_truncatev16siv16qi2_mask (TARGET_AVX512F)
1998#define HAVE_avx512f_us_truncatev16siv16qi2_mask (TARGET_AVX512F)
1999#define HAVE_avx512f_ss_truncatev16siv16hi2_mask (TARGET_AVX512F)
2000#define HAVE_avx512f_truncatev16siv16hi2_mask (TARGET_AVX512F)
2001#define HAVE_avx512f_us_truncatev16siv16hi2_mask (TARGET_AVX512F)
2002#define HAVE_avx512f_ss_truncatev8div8si2_mask (TARGET_AVX512F)
2003#define HAVE_avx512f_truncatev8div8si2_mask (TARGET_AVX512F)
2004#define HAVE_avx512f_us_truncatev8div8si2_mask (TARGET_AVX512F)
2005#define HAVE_avx512f_ss_truncatev8div8hi2_mask (TARGET_AVX512F)
2006#define HAVE_avx512f_truncatev8div8hi2_mask (TARGET_AVX512F)
2007#define HAVE_avx512f_us_truncatev8div8hi2_mask (TARGET_AVX512F)
2008#define HAVE_avx512bw_ss_truncatev32hiv32qi2 (TARGET_AVX512BW)
2009#define HAVE_avx512bw_truncatev32hiv32qi2 (TARGET_AVX512BW)
2010#define HAVE_avx512bw_us_truncatev32hiv32qi2 (TARGET_AVX512BW)
2011#define HAVE_avx512bw_ss_truncatev32hiv32qi2_mask (TARGET_AVX512BW)
2012#define HAVE_avx512bw_truncatev32hiv32qi2_mask (TARGET_AVX512BW)
2013#define HAVE_avx512bw_us_truncatev32hiv32qi2_mask (TARGET_AVX512BW)
2014#define HAVE_avx512vl_ss_truncatev4div4si2_mask (TARGET_AVX512VL)
2015#define HAVE_avx512vl_truncatev4div4si2_mask (TARGET_AVX512VL)
2016#define HAVE_avx512vl_us_truncatev4div4si2_mask (TARGET_AVX512VL)
2017#define HAVE_avx512vl_ss_truncatev8siv8hi2_mask (TARGET_AVX512VL)
2018#define HAVE_avx512vl_truncatev8siv8hi2_mask (TARGET_AVX512VL)
2019#define HAVE_avx512vl_us_truncatev8siv8hi2_mask (TARGET_AVX512VL)
2020#define HAVE_avx512vl_ss_truncatev16hiv16qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
2021#define HAVE_avx512vl_truncatev16hiv16qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
2022#define HAVE_avx512vl_us_truncatev16hiv16qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
2023#define HAVE_avx512vl_ss_truncatev2div2qi2_mask (TARGET_AVX512VL)
2024#define HAVE_avx512vl_truncatev2div2qi2_mask (TARGET_AVX512VL)
2025#define HAVE_avx512vl_us_truncatev2div2qi2_mask (TARGET_AVX512VL)
2026#define HAVE_avx512vl_ss_truncatev2div2qi2_mask_store (TARGET_AVX512VL)
2027#define HAVE_avx512vl_truncatev2div2qi2_mask_store (TARGET_AVX512VL)
2028#define HAVE_avx512vl_us_truncatev2div2qi2_mask_store (TARGET_AVX512VL)
2029#define HAVE_avx512vl_ss_truncatev4siv4qi2_mask (TARGET_AVX512VL)
2030#define HAVE_avx512vl_truncatev4siv4qi2_mask (TARGET_AVX512VL)
2031#define HAVE_avx512vl_us_truncatev4siv4qi2_mask (TARGET_AVX512VL)
2032#define HAVE_avx512vl_ss_truncatev4div4qi2_mask (TARGET_AVX512VL)
2033#define HAVE_avx512vl_truncatev4div4qi2_mask (TARGET_AVX512VL)
2034#define HAVE_avx512vl_us_truncatev4div4qi2_mask (TARGET_AVX512VL)
2035#define HAVE_avx512vl_ss_truncatev4siv4qi2_mask_store (TARGET_AVX512VL)
2036#define HAVE_avx512vl_truncatev4siv4qi2_mask_store (TARGET_AVX512VL)
2037#define HAVE_avx512vl_us_truncatev4siv4qi2_mask_store (TARGET_AVX512VL)
2038#define HAVE_avx512vl_ss_truncatev4div4qi2_mask_store (TARGET_AVX512VL)
2039#define HAVE_avx512vl_truncatev4div4qi2_mask_store (TARGET_AVX512VL)
2040#define HAVE_avx512vl_us_truncatev4div4qi2_mask_store (TARGET_AVX512VL)
2041#define HAVE_avx512vl_ss_truncatev8hiv8qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
2042#define HAVE_avx512vl_truncatev8hiv8qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
2043#define HAVE_avx512vl_us_truncatev8hiv8qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
2044#define HAVE_avx512vl_ss_truncatev8siv8qi2_mask (TARGET_AVX512VL)
2045#define HAVE_avx512vl_truncatev8siv8qi2_mask (TARGET_AVX512VL)
2046#define HAVE_avx512vl_us_truncatev8siv8qi2_mask (TARGET_AVX512VL)
2047#define HAVE_avx512vl_ss_truncatev8hiv8qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
2048#define HAVE_avx512vl_truncatev8hiv8qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
2049#define HAVE_avx512vl_us_truncatev8hiv8qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
2050#define HAVE_avx512vl_ss_truncatev8siv8qi2_mask_store (TARGET_AVX512VL)
2051#define HAVE_avx512vl_truncatev8siv8qi2_mask_store (TARGET_AVX512VL)
2052#define HAVE_avx512vl_us_truncatev8siv8qi2_mask_store (TARGET_AVX512VL)
2053#define HAVE_avx512vl_ss_truncatev4siv4hi2_mask (TARGET_AVX512VL)
2054#define HAVE_avx512vl_truncatev4siv4hi2_mask (TARGET_AVX512VL)
2055#define HAVE_avx512vl_us_truncatev4siv4hi2_mask (TARGET_AVX512VL)
2056#define HAVE_avx512vl_ss_truncatev4div4hi2_mask (TARGET_AVX512VL)
2057#define HAVE_avx512vl_truncatev4div4hi2_mask (TARGET_AVX512VL)
2058#define HAVE_avx512vl_us_truncatev4div4hi2_mask (TARGET_AVX512VL)
2059#define HAVE_avx512vl_ss_truncatev4siv4hi2_mask_store (TARGET_AVX512VL)
2060#define HAVE_avx512vl_truncatev4siv4hi2_mask_store (TARGET_AVX512VL)
2061#define HAVE_avx512vl_us_truncatev4siv4hi2_mask_store (TARGET_AVX512VL)
2062#define HAVE_avx512vl_ss_truncatev4div4hi2_mask_store (TARGET_AVX512VL)
2063#define HAVE_avx512vl_truncatev4div4hi2_mask_store (TARGET_AVX512VL)
2064#define HAVE_avx512vl_us_truncatev4div4hi2_mask_store (TARGET_AVX512VL)
2065#define HAVE_avx512vl_ss_truncatev2div2hi2_mask (TARGET_AVX512VL)
2066#define HAVE_avx512vl_truncatev2div2hi2_mask (TARGET_AVX512VL)
2067#define HAVE_avx512vl_us_truncatev2div2hi2_mask (TARGET_AVX512VL)
2068#define HAVE_avx512vl_ss_truncatev2div2hi2_mask_store (TARGET_AVX512VL)
2069#define HAVE_avx512vl_truncatev2div2hi2_mask_store (TARGET_AVX512VL)
2070#define HAVE_avx512vl_us_truncatev2div2hi2_mask_store (TARGET_AVX512VL)
2071#define HAVE_avx512vl_ss_truncatev2div2si2_mask (TARGET_AVX512VL)
2072#define HAVE_avx512vl_truncatev2div2si2_mask (TARGET_AVX512VL)
2073#define HAVE_avx512vl_us_truncatev2div2si2_mask (TARGET_AVX512VL)
2074#define HAVE_avx512vl_ss_truncatev2div2si2_mask_store (TARGET_AVX512VL)
2075#define HAVE_avx512vl_truncatev2div2si2_mask_store (TARGET_AVX512VL)
2076#define HAVE_avx512vl_us_truncatev2div2si2_mask_store (TARGET_AVX512VL)
2077#define HAVE_avx512f_ss_truncatev8div16qi2_mask (TARGET_AVX512F)
2078#define HAVE_avx512f_truncatev8div16qi2_mask (TARGET_AVX512F)
2079#define HAVE_avx512f_us_truncatev8div16qi2_mask (TARGET_AVX512F)
2080#define HAVE_avx512f_ss_truncatev8div16qi2_mask_store (TARGET_AVX512F)
2081#define HAVE_avx512f_truncatev8div16qi2_mask_store (TARGET_AVX512F)
2082#define HAVE_avx512f_us_truncatev8div16qi2_mask_store (TARGET_AVX512F)
/* pmaddwd, AVX512DQ 64-bit multiply, and vector shift patterns.  The
   constant comparisons such as `64 == 64' / `32 == 64' / `16 == 64' are
   the generator's literal substitution of the vector-mode size into a
   "full 512-bit width OR AVX512VL" condition; they fold to true/false at
   compile time and are intentionally left unsimplified.  Likewise `&& 1'
   is a vacuous conjunct from condition composition.  */
2083#define HAVE_avx512bw_pmaddwd512v32hi ((TARGET_AVX512BW && 1) && (TARGET_AVX512BW))
2084#define HAVE_avx512bw_pmaddwd512v32hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512BW)))
2085#define HAVE_avx512bw_pmaddwd512v16hi ((TARGET_AVX512BW && 1) && (TARGET_AVX2))
2086#define HAVE_avx512bw_pmaddwd512v16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
2087#define HAVE_avx512bw_pmaddwd512v8hi (TARGET_AVX512BW && 1)
2088#define HAVE_avx512bw_pmaddwd512v8hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW && (16 == 64 || TARGET_AVX512VL)))
2089#define HAVE_avx512dq_mulv8di3 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512F))
2090#define HAVE_avx512dq_mulv8di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
2091#define HAVE_avx512dq_mulv4di3 ((TARGET_AVX512DQ && 1) && (TARGET_AVX))
2092#define HAVE_avx512dq_mulv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
2093#define HAVE_avx512dq_mulv2di3 (TARGET_AVX512DQ && 1)
2094#define HAVE_avx512dq_mulv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && (16 == 64 || TARGET_AVX512VL)))
2095#define HAVE_ashrv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512VL) && (TARGET_AVX512BW)))
2096#define HAVE_ashrv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512VL) && (TARGET_AVX512BW)))
2097#define HAVE_ashrv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
2098#define HAVE_ashrv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
2099#define HAVE_ashrv16hi3 ((TARGET_SSE2) && (TARGET_AVX2))
2100#define HAVE_ashrv8hi3 (TARGET_SSE2)
2101#define HAVE_ashrv8si3 ((TARGET_SSE2) && (TARGET_AVX2))
2102#define HAVE_ashrv4si3 (TARGET_SSE2)
2103#define HAVE_ashrv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
2104#define HAVE_ashrv32hi3 ((TARGET_AVX512F) && (TARGET_AVX512BW))
2105#define HAVE_ashrv32hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512BW)))
2106#define HAVE_ashrv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2107#define HAVE_ashrv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2108#define HAVE_ashrv16si3 (TARGET_AVX512F)
2109#define HAVE_ashrv16si3_mask (TARGET_AVX512F)
2110#define HAVE_ashrv8di3 (TARGET_AVX512F)
2111#define HAVE_ashrv8di3_mask (TARGET_AVX512F)
2112#define HAVE_ashlv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
2113#define HAVE_ashlv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
2114#define HAVE_lshrv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
2115#define HAVE_lshrv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
2116#define HAVE_ashlv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
2117#define HAVE_ashlv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
2118#define HAVE_lshrv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
2119#define HAVE_lshrv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
2120#define HAVE_ashlv8hi3 (TARGET_SSE2 && 1 && 1)
2121#define HAVE_ashlv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
2122#define HAVE_lshrv8hi3 (TARGET_SSE2 && 1 && 1)
2123#define HAVE_lshrv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
2124#define HAVE_ashlv8si3 ((TARGET_SSE2 && 1) && (TARGET_AVX2))
2125#define HAVE_ashlv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
2126#define HAVE_lshrv8si3 ((TARGET_SSE2 && 1) && (TARGET_AVX2))
2127#define HAVE_lshrv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
2128#define HAVE_ashlv4si3 (TARGET_SSE2 && 1)
2129#define HAVE_ashlv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
2130#define HAVE_lshrv4si3 (TARGET_SSE2 && 1)
2131#define HAVE_lshrv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
2132#define HAVE_ashlv4di3 ((TARGET_SSE2 && 1) && (TARGET_AVX2))
2133#define HAVE_ashlv4di3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
2134#define HAVE_lshrv4di3 ((TARGET_SSE2 && 1) && (TARGET_AVX2))
2135#define HAVE_lshrv4di3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
2136#define HAVE_ashlv2di3 (TARGET_SSE2 && 1)
2137#define HAVE_ashlv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
2138#define HAVE_lshrv2di3 (TARGET_SSE2 && 1)
2139#define HAVE_lshrv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
2140#define HAVE_ashlv16si3 (TARGET_AVX512F && 1)
2141#define HAVE_ashlv16si3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL)))
2142#define HAVE_lshrv16si3 (TARGET_AVX512F && 1)
2143#define HAVE_lshrv16si3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL)))
2144#define HAVE_ashlv8di3 (TARGET_AVX512F && 1)
2145#define HAVE_ashlv8di3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL)))
2146#define HAVE_lshrv8di3 (TARGET_AVX512F && 1)
2147#define HAVE_lshrv8di3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL)))
2148#define HAVE_avx512bw_ashlv4ti3 ((TARGET_SSE2) && (TARGET_AVX512BW))
2149#define HAVE_avx2_ashlv2ti3 ((TARGET_SSE2) && (TARGET_AVX2))
2150#define HAVE_sse2_ashlv1ti3 (TARGET_SSE2)
2151#define HAVE_avx512bw_lshrv4ti3 ((TARGET_SSE2) && (TARGET_AVX512BW))
2152#define HAVE_avx2_lshrv2ti3 ((TARGET_SSE2) && (TARGET_AVX2))
2153#define HAVE_sse2_lshrv1ti3 (TARGET_SSE2)
/* Rotate patterns: rolv/rorv are the variable-count forms, rol/ror the
   immediate-count forms, for SI and DI element vectors.  512-bit forms
   need AVX512F; 128/256-bit "vl" forms also need AVX512VL.  */
2154#define HAVE_avx512f_rolvv16si (TARGET_AVX512F)
2155#define HAVE_avx512f_rolvv16si_mask (TARGET_AVX512F)
2156#define HAVE_avx512f_rorvv16si (TARGET_AVX512F)
2157#define HAVE_avx512f_rorvv16si_mask (TARGET_AVX512F)
2158#define HAVE_avx512vl_rolvv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
2159#define HAVE_avx512vl_rolvv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2160#define HAVE_avx512vl_rorvv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
2161#define HAVE_avx512vl_rorvv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2162#define HAVE_avx512vl_rolvv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
2163#define HAVE_avx512vl_rolvv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2164#define HAVE_avx512vl_rorvv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
2165#define HAVE_avx512vl_rorvv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2166#define HAVE_avx512f_rolvv8di (TARGET_AVX512F)
2167#define HAVE_avx512f_rolvv8di_mask (TARGET_AVX512F)
2168#define HAVE_avx512f_rorvv8di (TARGET_AVX512F)
2169#define HAVE_avx512f_rorvv8di_mask (TARGET_AVX512F)
2170#define HAVE_avx512vl_rolvv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
2171#define HAVE_avx512vl_rolvv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2172#define HAVE_avx512vl_rorvv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
2173#define HAVE_avx512vl_rorvv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2174#define HAVE_avx512vl_rolvv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
2175#define HAVE_avx512vl_rolvv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2176#define HAVE_avx512vl_rorvv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
2177#define HAVE_avx512vl_rorvv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2178#define HAVE_avx512f_rolv16si (TARGET_AVX512F)
2179#define HAVE_avx512f_rolv16si_mask (TARGET_AVX512F)
2180#define HAVE_avx512f_rorv16si (TARGET_AVX512F)
2181#define HAVE_avx512f_rorv16si_mask (TARGET_AVX512F)
2182#define HAVE_avx512vl_rolv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
2183#define HAVE_avx512vl_rolv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2184#define HAVE_avx512vl_rorv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
2185#define HAVE_avx512vl_rorv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2186#define HAVE_avx512vl_rolv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
2187#define HAVE_avx512vl_rolv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2188#define HAVE_avx512vl_rorv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
2189#define HAVE_avx512vl_rorv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2190#define HAVE_avx512f_rolv8di (TARGET_AVX512F)
2191#define HAVE_avx512f_rolv8di_mask (TARGET_AVX512F)
2192#define HAVE_avx512f_rorv8di (TARGET_AVX512F)
2193#define HAVE_avx512f_rorv8di_mask (TARGET_AVX512F)
2194#define HAVE_avx512vl_rolv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
2195#define HAVE_avx512vl_rolv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2196#define HAVE_avx512vl_rorv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
2197#define HAVE_avx512vl_rorv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2198#define HAVE_avx512vl_rolv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
2199#define HAVE_avx512vl_rolv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2200#define HAVE_avx512vl_rorv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
2201#define HAVE_avx512vl_rorv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
/* Masked signed/unsigned min/max patterns for QI and HI element vectors;
   all require AVX512BW, with AVX512VL for the sub-512-bit widths.  */
2202#define HAVE_smaxv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2203#define HAVE_sminv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2204#define HAVE_umaxv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2205#define HAVE_uminv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2206#define HAVE_smaxv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2207#define HAVE_sminv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2208#define HAVE_umaxv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2209#define HAVE_uminv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2210#define HAVE_smaxv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2211#define HAVE_sminv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2212#define HAVE_umaxv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2213#define HAVE_uminv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2214#define HAVE_smaxv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2215#define HAVE_sminv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2216#define HAVE_umaxv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2217#define HAVE_uminv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2218#define HAVE_smaxv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2219#define HAVE_sminv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2220#define HAVE_umaxv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2221#define HAVE_uminv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2222#define HAVE_smaxv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2223#define HAVE_sminv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2224#define HAVE_umaxv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2225#define HAVE_uminv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
/* Mask-producing equality-compare patterns.  Each condition embeds an
   ix86_binary_operator_ok (EQ, <mode>, operands) operand-legitimacy check
   in addition to the ISA gate.  */
2226#define HAVE_avx512bw_eqv64qi3_1 (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V64QImode, operands))
2227#define HAVE_avx512bw_eqv64qi3_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V64QImode, operands)))
2228#define HAVE_avx512vl_eqv16qi3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16QImode, operands)) && (TARGET_AVX512VL))
2229#define HAVE_avx512vl_eqv16qi3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16QImode, operands)) && (TARGET_AVX512VL)))
2230#define HAVE_avx512vl_eqv32qi3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V32QImode, operands)) && (TARGET_AVX512VL))
2231#define HAVE_avx512vl_eqv32qi3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V32QImode, operands)) && (TARGET_AVX512VL)))
2232#define HAVE_avx512bw_eqv32hi3_1 (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V32HImode, operands))
2233#define HAVE_avx512bw_eqv32hi3_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V32HImode, operands)))
2234#define HAVE_avx512vl_eqv16hi3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16HImode, operands)) && (TARGET_AVX512VL))
2235#define HAVE_avx512vl_eqv16hi3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16HImode, operands)) && (TARGET_AVX512VL)))
2236#define HAVE_avx512vl_eqv8hi3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8HImode, operands)) && (TARGET_AVX512VL))
2237#define HAVE_avx512vl_eqv8hi3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8HImode, operands)) && (TARGET_AVX512VL)))
2238#define HAVE_avx512f_eqv16si3_1 (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16SImode, operands))
2239#define HAVE_avx512f_eqv16si3_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16SImode, operands)))
2240#define HAVE_avx512vl_eqv8si3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8SImode, operands)) && (TARGET_AVX512VL))
2241#define HAVE_avx512vl_eqv8si3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8SImode, operands)) && (TARGET_AVX512VL)))
2242#define HAVE_avx512vl_eqv4si3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V4SImode, operands)) && (TARGET_AVX512VL))
2243#define HAVE_avx512vl_eqv4si3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V4SImode, operands)) && (TARGET_AVX512VL)))
2244#define HAVE_avx512f_eqv8di3_1 (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8DImode, operands))
2245#define HAVE_avx512f_eqv8di3_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8DImode, operands)))
2246#define HAVE_avx512vl_eqv4di3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V4DImode, operands)) && (TARGET_AVX512VL))
2247#define HAVE_avx512vl_eqv4di3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V4DImode, operands)) && (TARGET_AVX512VL)))
2248#define HAVE_avx512vl_eqv2di3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V2DImode, operands)) && (TARGET_AVX512VL))
2249#define HAVE_avx512vl_eqv2di3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V2DImode, operands)) && (TARGET_AVX512VL)))
/* Greater-than compare patterns across SSE2/SSE4.2/AVX2/AVX-512.  The
   legacy SSE2 byte/word/dword forms are disabled when TARGET_XOP is on
   (presumably XOP provides its own compare patterns — see the .md file).  */
2250#define HAVE_sse4_2_gtv2di3 (TARGET_SSE4_2)
2251#define HAVE_avx2_gtv32qi3 (TARGET_AVX2)
2252#define HAVE_avx2_gtv16hi3 (TARGET_AVX2)
2253#define HAVE_avx2_gtv8si3 (TARGET_AVX2)
2254#define HAVE_avx2_gtv4di3 (TARGET_AVX2)
2255#define HAVE_avx512f_gtv16si3 (TARGET_AVX512F)
2256#define HAVE_avx512f_gtv16si3_mask (TARGET_AVX512F)
2257#define HAVE_avx512vl_gtv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2258#define HAVE_avx512vl_gtv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2259#define HAVE_avx512vl_gtv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2260#define HAVE_avx512vl_gtv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2261#define HAVE_avx512f_gtv8di3 (TARGET_AVX512F)
2262#define HAVE_avx512f_gtv8di3_mask (TARGET_AVX512F)
2263#define HAVE_avx512vl_gtv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2264#define HAVE_avx512vl_gtv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2265#define HAVE_avx512vl_gtv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2266#define HAVE_avx512vl_gtv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2267#define HAVE_avx512bw_gtv64qi3 (TARGET_AVX512BW)
2268#define HAVE_avx512bw_gtv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2269#define HAVE_avx512vl_gtv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2270#define HAVE_avx512vl_gtv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2271#define HAVE_avx512vl_gtv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2272#define HAVE_avx512vl_gtv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2273#define HAVE_avx512bw_gtv32hi3 (TARGET_AVX512BW)
2274#define HAVE_avx512bw_gtv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2275#define HAVE_avx512vl_gtv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2276#define HAVE_avx512vl_gtv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2277#define HAVE_avx512vl_gtv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2278#define HAVE_avx512vl_gtv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2279#define HAVE_sse2_gtv16qi3 (TARGET_SSE2 && !TARGET_XOP)
2280#define HAVE_sse2_gtv8hi3 (TARGET_SSE2 && !TARGET_XOP)
2281#define HAVE_sse2_gtv4si3 (TARGET_SSE2 && !TARGET_XOP)
/* Masked bitwise and/ior/xor patterns for SI/DI element vectors.  These
   conditions span two physical lines via backslash continuation; they
   combine the mode-size substitution (`NN == 64 || TARGET_AVX512VL') with
   an ix86_binary_operator_ok operand check.  */
2282#define HAVE_andv16si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
2283 && ix86_binary_operator_ok (AND, V16SImode, operands)) && (TARGET_AVX512F)))
2284#define HAVE_iorv16si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
2285 && ix86_binary_operator_ok (IOR, V16SImode, operands)) && (TARGET_AVX512F)))
2286#define HAVE_xorv16si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
2287 && ix86_binary_operator_ok (XOR, V16SImode, operands)) && (TARGET_AVX512F)))
2288#define HAVE_andv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
2289 && ix86_binary_operator_ok (AND, V8SImode, operands)) && (TARGET_AVX)))
2290#define HAVE_iorv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
2291 && ix86_binary_operator_ok (IOR, V8SImode, operands)) && (TARGET_AVX)))
2292#define HAVE_xorv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
2293 && ix86_binary_operator_ok (XOR, V8SImode, operands)) && (TARGET_AVX)))
2294#define HAVE_andv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
2295 && ix86_binary_operator_ok (AND, V4SImode, operands)))
2296#define HAVE_iorv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
2297 && ix86_binary_operator_ok (IOR, V4SImode, operands)))
2298#define HAVE_xorv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
2299 && ix86_binary_operator_ok (XOR, V4SImode, operands)))
2300#define HAVE_andv8di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
2301 && ix86_binary_operator_ok (AND, V8DImode, operands)) && (TARGET_AVX512F)))
2302#define HAVE_iorv8di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
2303 && ix86_binary_operator_ok (IOR, V8DImode, operands)) && (TARGET_AVX512F)))
2304#define HAVE_xorv8di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
2305 && ix86_binary_operator_ok (XOR, V8DImode, operands)) && (TARGET_AVX512F)))
2306#define HAVE_andv4di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
2307 && ix86_binary_operator_ok (AND, V4DImode, operands)) && (TARGET_AVX)))
2308#define HAVE_iorv4di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
2309 && ix86_binary_operator_ok (IOR, V4DImode, operands)) && (TARGET_AVX)))
2310#define HAVE_xorv4di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
2311 && ix86_binary_operator_ok (XOR, V4DImode, operands)) && (TARGET_AVX)))
2312#define HAVE_andv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
2313 && ix86_binary_operator_ok (AND, V2DImode, operands)))
2314#define HAVE_iorv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
2315 && ix86_binary_operator_ok (IOR, V2DImode, operands)))
2316#define HAVE_xorv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
2317 && ix86_binary_operator_ok (XOR, V2DImode, operands)))
/* testm (vptestm*) and testnm (vptestnm*) mask-test patterns.  QI/HI
   element widths require AVX512BW, SI/DI widths AVX512F; "vl" variants
   additionally require AVX512VL.  */
2318#define HAVE_avx512bw_testmv64qi3 (TARGET_AVX512BW)
2319#define HAVE_avx512bw_testmv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2320#define HAVE_avx512vl_testmv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2321#define HAVE_avx512vl_testmv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2322#define HAVE_avx512vl_testmv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2323#define HAVE_avx512vl_testmv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2324#define HAVE_avx512bw_testmv32hi3 (TARGET_AVX512BW)
2325#define HAVE_avx512bw_testmv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2326#define HAVE_avx512vl_testmv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2327#define HAVE_avx512vl_testmv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2328#define HAVE_avx512vl_testmv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2329#define HAVE_avx512vl_testmv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2330#define HAVE_avx512f_testmv16si3 (TARGET_AVX512F)
2331#define HAVE_avx512f_testmv16si3_mask (TARGET_AVX512F)
2332#define HAVE_avx512vl_testmv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2333#define HAVE_avx512vl_testmv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2334#define HAVE_avx512vl_testmv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2335#define HAVE_avx512vl_testmv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2336#define HAVE_avx512f_testmv8di3 (TARGET_AVX512F)
2337#define HAVE_avx512f_testmv8di3_mask (TARGET_AVX512F)
2338#define HAVE_avx512vl_testmv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2339#define HAVE_avx512vl_testmv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2340#define HAVE_avx512vl_testmv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2341#define HAVE_avx512vl_testmv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2342#define HAVE_avx512bw_testnmv64qi3 (TARGET_AVX512BW)
2343#define HAVE_avx512bw_testnmv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2344#define HAVE_avx512vl_testnmv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2345#define HAVE_avx512vl_testnmv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2346#define HAVE_avx512vl_testnmv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2347#define HAVE_avx512vl_testnmv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2348#define HAVE_avx512bw_testnmv32hi3 (TARGET_AVX512BW)
2349#define HAVE_avx512bw_testnmv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2350#define HAVE_avx512vl_testnmv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2351#define HAVE_avx512vl_testnmv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2352#define HAVE_avx512vl_testnmv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2353#define HAVE_avx512vl_testnmv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2354#define HAVE_avx512f_testnmv16si3 (TARGET_AVX512F)
2355#define HAVE_avx512f_testnmv16si3_mask (TARGET_AVX512F)
2356#define HAVE_avx512vl_testnmv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2357#define HAVE_avx512vl_testnmv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2358#define HAVE_avx512vl_testnmv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2359#define HAVE_avx512vl_testnmv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2360#define HAVE_avx512f_testnmv8di3 (TARGET_AVX512F)
2361#define HAVE_avx512f_testnmv8di3_mask (TARGET_AVX512F)
2362#define HAVE_avx512vl_testnmv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2363#define HAVE_avx512vl_testnmv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2364#define HAVE_avx512vl_testnmv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
2365#define HAVE_avx512vl_testnmv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
2366#define HAVE_avx512bw_packsswb ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
2367#define HAVE_avx512bw_packsswb_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
2368#define HAVE_avx2_packsswb ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
2369#define HAVE_avx2_packsswb_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
2370#define HAVE_sse2_packsswb (TARGET_SSE2 && 1 && 1)
2371#define HAVE_sse2_packsswb_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
2372#define HAVE_avx512bw_packssdw ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
2373#define HAVE_avx512bw_packssdw_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
2374#define HAVE_avx2_packssdw ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
2375#define HAVE_avx2_packssdw_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
2376#define HAVE_sse2_packssdw (TARGET_SSE2 && 1 && 1)
2377#define HAVE_sse2_packssdw_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
2378#define HAVE_avx512bw_packuswb ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
2379#define HAVE_avx512bw_packuswb_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
2380#define HAVE_avx2_packuswb ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
2381#define HAVE_avx2_packuswb_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
2382#define HAVE_sse2_packuswb (TARGET_SSE2 && 1 && 1)
2383#define HAVE_sse2_packuswb_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
2384#define HAVE_avx512bw_interleave_highv64qi (TARGET_AVX512BW)
2385#define HAVE_avx512bw_interleave_highv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2386#define HAVE_avx2_interleave_highv32qi (TARGET_AVX2 && 1)
2387#define HAVE_avx2_interleave_highv32qi_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2388#define HAVE_vec_interleave_highv16qi (TARGET_SSE2 && 1)
2389#define HAVE_vec_interleave_highv16qi_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
2390#define HAVE_avx512bw_interleave_lowv64qi (TARGET_AVX512BW)
2391#define HAVE_avx512bw_interleave_lowv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2392#define HAVE_avx2_interleave_lowv32qi (TARGET_AVX2 && 1 && 1)
2393#define HAVE_avx2_interleave_lowv32qi_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL && TARGET_AVX512BW))
2394#define HAVE_vec_interleave_lowv16qi (TARGET_SSE2 && 1 && 1)
2395#define HAVE_vec_interleave_lowv16qi_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL && TARGET_AVX512BW))
2396#define HAVE_avx512bw_interleave_highv32hi (TARGET_AVX512BW)
2397#define HAVE_avx512bw_interleave_highv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2398#define HAVE_avx2_interleave_highv16hi (TARGET_AVX2 && 1 && 1)
2399#define HAVE_avx2_interleave_highv16hi_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL && TARGET_AVX512BW))
2400#define HAVE_vec_interleave_highv8hi (TARGET_SSE2 && 1 && 1)
2401#define HAVE_vec_interleave_highv8hi_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL && TARGET_AVX512BW))
2402#define HAVE_avx512bw_interleave_lowv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2403#define HAVE_avx2_interleave_lowv16hi (TARGET_AVX2 && 1 && 1)
2404#define HAVE_avx2_interleave_lowv16hi_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL && TARGET_AVX512BW))
2405#define HAVE_vec_interleave_lowv8hi (TARGET_SSE2 && 1 && 1)
2406#define HAVE_vec_interleave_lowv8hi_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL && TARGET_AVX512BW))
2407#define HAVE_avx2_interleave_highv8si (TARGET_AVX2 && 1)
2408#define HAVE_avx2_interleave_highv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2409#define HAVE_avx512f_interleave_highv16si_mask (TARGET_AVX512F)
2410#define HAVE_vec_interleave_highv4si (TARGET_SSE2 && 1)
2411#define HAVE_vec_interleave_highv4si_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
2412#define HAVE_avx2_interleave_lowv8si (TARGET_AVX2 && 1)
2413#define HAVE_avx2_interleave_lowv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2414#define HAVE_avx512f_interleave_lowv16si_mask (TARGET_AVX512F)
2415#define HAVE_vec_interleave_lowv4si (TARGET_SSE2 && 1)
2416#define HAVE_vec_interleave_lowv4si_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
2417#define HAVE_sse4_1_pinsrb ((TARGET_SSE2 \
2418 && ((unsigned) exact_log2 (INTVAL (operands[3])) \
2419 < GET_MODE_NUNITS (V16QImode))) && (TARGET_SSE4_1))
2420#define HAVE_sse2_pinsrw (TARGET_SSE2 \
2421 && ((unsigned) exact_log2 (INTVAL (operands[3])) \
2422 < GET_MODE_NUNITS (V8HImode)))
2423#define HAVE_sse4_1_pinsrd ((TARGET_SSE2 \
2424 && ((unsigned) exact_log2 (INTVAL (operands[3])) \
2425 < GET_MODE_NUNITS (V4SImode))) && (TARGET_SSE4_1))
2426#define HAVE_sse4_1_pinsrq ((TARGET_SSE2 \
2427 && ((unsigned) exact_log2 (INTVAL (operands[3])) \
2428 < GET_MODE_NUNITS (V2DImode))) && (TARGET_SSE4_1 && TARGET_64BIT))
2429#define HAVE_avx512dq_vinsertf64x2_1_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ)))
2430#define HAVE_avx512dq_vinserti64x2_1_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ)))
2431#define HAVE_avx512f_vinsertf32x4_1_mask (TARGET_AVX512F)
2432#define HAVE_avx512f_vinserti32x4_1_mask (TARGET_AVX512F)
2433#define HAVE_vec_set_lo_v16sf (TARGET_AVX512DQ)
2434#define HAVE_vec_set_lo_v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
2435#define HAVE_vec_set_lo_v16si (TARGET_AVX512DQ)
2436#define HAVE_vec_set_lo_v16si_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
2437#define HAVE_vec_set_hi_v16sf (TARGET_AVX512DQ)
2438#define HAVE_vec_set_hi_v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
2439#define HAVE_vec_set_hi_v16si (TARGET_AVX512DQ)
2440#define HAVE_vec_set_hi_v16si_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
2441#define HAVE_vec_set_lo_v8df (TARGET_AVX512F)
2442#define HAVE_vec_set_lo_v8df_mask (TARGET_AVX512F)
2443#define HAVE_vec_set_lo_v8di (TARGET_AVX512F)
2444#define HAVE_vec_set_lo_v8di_mask (TARGET_AVX512F)
2445#define HAVE_vec_set_hi_v8df (TARGET_AVX512F)
2446#define HAVE_vec_set_hi_v8df_mask (TARGET_AVX512F)
2447#define HAVE_vec_set_hi_v8di (TARGET_AVX512F)
2448#define HAVE_vec_set_hi_v8di_mask (TARGET_AVX512F)
2449#define HAVE_avx512dq_shuf_i64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512VL \
2450 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2451 && INTVAL (operands[5]) == (INTVAL (operands[6]) - 1))))
2452#define HAVE_avx512dq_shuf_f64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512VL \
2453 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2454 && INTVAL (operands[5]) == (INTVAL (operands[6]) - 1))))
2455#define HAVE_avx512f_shuf_f64x2_1 (TARGET_AVX512F \
2456 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2457 && INTVAL (operands[5]) == (INTVAL (operands[6]) - 1) \
2458 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2459 && INTVAL (operands[9]) == (INTVAL (operands[10]) - 1)))
2460#define HAVE_avx512f_shuf_f64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
2461 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2462 && INTVAL (operands[5]) == (INTVAL (operands[6]) - 1) \
2463 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2464 && INTVAL (operands[9]) == (INTVAL (operands[10]) - 1))))
2465#define HAVE_avx512f_shuf_i64x2_1 (TARGET_AVX512F \
2466 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2467 && INTVAL (operands[5]) == (INTVAL (operands[6]) - 1) \
2468 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2469 && INTVAL (operands[9]) == (INTVAL (operands[10]) - 1)))
2470#define HAVE_avx512f_shuf_i64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
2471 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2472 && INTVAL (operands[5]) == (INTVAL (operands[6]) - 1) \
2473 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2474 && INTVAL (operands[9]) == (INTVAL (operands[10]) - 1))))
2475#define HAVE_avx512vl_shuf_i32x4_1 (TARGET_AVX512VL \
2476 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2477 && INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
2478 && INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
2479 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2480 && INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
2481 && INTVAL (operands[7]) == (INTVAL (operands[10]) - 3)))
2482#define HAVE_avx512vl_shuf_i32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512VL \
2483 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2484 && INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
2485 && INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
2486 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2487 && INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
2488 && INTVAL (operands[7]) == (INTVAL (operands[10]) - 3))))
2489#define HAVE_avx512vl_shuf_f32x4_1 (TARGET_AVX512VL \
2490 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2491 && INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
2492 && INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
2493 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2494 && INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
2495 && INTVAL (operands[7]) == (INTVAL (operands[10]) - 3)))
2496#define HAVE_avx512vl_shuf_f32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512VL \
2497 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2498 && INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
2499 && INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
2500 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2501 && INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
2502 && INTVAL (operands[7]) == (INTVAL (operands[10]) - 3))))
2503#define HAVE_avx512f_shuf_f32x4_1 (TARGET_AVX512F \
2504 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2505 && INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
2506 && INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
2507 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2508 && INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
2509 && INTVAL (operands[7]) == (INTVAL (operands[10]) - 3) \
2510 && INTVAL (operands[11]) == (INTVAL (operands[12]) - 1) \
2511 && INTVAL (operands[11]) == (INTVAL (operands[13]) - 2) \
2512 && INTVAL (operands[11]) == (INTVAL (operands[14]) - 3) \
2513 && INTVAL (operands[15]) == (INTVAL (operands[16]) - 1) \
2514 && INTVAL (operands[15]) == (INTVAL (operands[17]) - 2) \
2515 && INTVAL (operands[15]) == (INTVAL (operands[18]) - 3)))
2516#define HAVE_avx512f_shuf_f32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
2517 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2518 && INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
2519 && INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
2520 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2521 && INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
2522 && INTVAL (operands[7]) == (INTVAL (operands[10]) - 3) \
2523 && INTVAL (operands[11]) == (INTVAL (operands[12]) - 1) \
2524 && INTVAL (operands[11]) == (INTVAL (operands[13]) - 2) \
2525 && INTVAL (operands[11]) == (INTVAL (operands[14]) - 3) \
2526 && INTVAL (operands[15]) == (INTVAL (operands[16]) - 1) \
2527 && INTVAL (operands[15]) == (INTVAL (operands[17]) - 2) \
2528 && INTVAL (operands[15]) == (INTVAL (operands[18]) - 3))))
2529#define HAVE_avx512f_shuf_i32x4_1 (TARGET_AVX512F \
2530 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2531 && INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
2532 && INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
2533 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2534 && INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
2535 && INTVAL (operands[7]) == (INTVAL (operands[10]) - 3) \
2536 && INTVAL (operands[11]) == (INTVAL (operands[12]) - 1) \
2537 && INTVAL (operands[11]) == (INTVAL (operands[13]) - 2) \
2538 && INTVAL (operands[11]) == (INTVAL (operands[14]) - 3) \
2539 && INTVAL (operands[15]) == (INTVAL (operands[16]) - 1) \
2540 && INTVAL (operands[15]) == (INTVAL (operands[17]) - 2) \
2541 && INTVAL (operands[15]) == (INTVAL (operands[18]) - 3)))
2542#define HAVE_avx512f_shuf_i32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
2543 && (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
2544 && INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
2545 && INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
2546 && INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
2547 && INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
2548 && INTVAL (operands[7]) == (INTVAL (operands[10]) - 3) \
2549 && INTVAL (operands[11]) == (INTVAL (operands[12]) - 1) \
2550 && INTVAL (operands[11]) == (INTVAL (operands[13]) - 2) \
2551 && INTVAL (operands[11]) == (INTVAL (operands[14]) - 3) \
2552 && INTVAL (operands[15]) == (INTVAL (operands[16]) - 1) \
2553 && INTVAL (operands[15]) == (INTVAL (operands[17]) - 2) \
2554 && INTVAL (operands[15]) == (INTVAL (operands[18]) - 3))))
2555#define HAVE_avx512f_pshufd_1 (TARGET_AVX512F \
2556 && INTVAL (operands[2]) + 4 == INTVAL (operands[6]) \
2557 && INTVAL (operands[3]) + 4 == INTVAL (operands[7]) \
2558 && INTVAL (operands[4]) + 4 == INTVAL (operands[8]) \
2559 && INTVAL (operands[5]) + 4 == INTVAL (operands[9]) \
2560 && INTVAL (operands[2]) + 8 == INTVAL (operands[10]) \
2561 && INTVAL (operands[3]) + 8 == INTVAL (operands[11]) \
2562 && INTVAL (operands[4]) + 8 == INTVAL (operands[12]) \
2563 && INTVAL (operands[5]) + 8 == INTVAL (operands[13]) \
2564 && INTVAL (operands[2]) + 12 == INTVAL (operands[14]) \
2565 && INTVAL (operands[3]) + 12 == INTVAL (operands[15]) \
2566 && INTVAL (operands[4]) + 12 == INTVAL (operands[16]) \
2567 && INTVAL (operands[5]) + 12 == INTVAL (operands[17]))
2568#define HAVE_avx512f_pshufd_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
2569 && INTVAL (operands[2]) + 4 == INTVAL (operands[6]) \
2570 && INTVAL (operands[3]) + 4 == INTVAL (operands[7]) \
2571 && INTVAL (operands[4]) + 4 == INTVAL (operands[8]) \
2572 && INTVAL (operands[5]) + 4 == INTVAL (operands[9]) \
2573 && INTVAL (operands[2]) + 8 == INTVAL (operands[10]) \
2574 && INTVAL (operands[3]) + 8 == INTVAL (operands[11]) \
2575 && INTVAL (operands[4]) + 8 == INTVAL (operands[12]) \
2576 && INTVAL (operands[5]) + 8 == INTVAL (operands[13]) \
2577 && INTVAL (operands[2]) + 12 == INTVAL (operands[14]) \
2578 && INTVAL (operands[3]) + 12 == INTVAL (operands[15]) \
2579 && INTVAL (operands[4]) + 12 == INTVAL (operands[16]) \
2580 && INTVAL (operands[5]) + 12 == INTVAL (operands[17])))
2581#define HAVE_avx2_pshufd_1 (TARGET_AVX2 \
2582 && 1 \
2583 && INTVAL (operands[2]) + 4 == INTVAL (operands[6]) \
2584 && INTVAL (operands[3]) + 4 == INTVAL (operands[7]) \
2585 && INTVAL (operands[4]) + 4 == INTVAL (operands[8]) \
2586 && INTVAL (operands[5]) + 4 == INTVAL (operands[9]))
2587#define HAVE_avx2_pshufd_1_mask ((TARGET_AVX512F) && (TARGET_AVX2 \
2588 && TARGET_AVX512VL \
2589 && INTVAL (operands[2]) + 4 == INTVAL (operands[6]) \
2590 && INTVAL (operands[3]) + 4 == INTVAL (operands[7]) \
2591 && INTVAL (operands[4]) + 4 == INTVAL (operands[8]) \
2592 && INTVAL (operands[5]) + 4 == INTVAL (operands[9])))
2593#define HAVE_sse2_pshufd_1 (TARGET_SSE2 && 1)
2594#define HAVE_sse2_pshufd_1_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
2595#define HAVE_avx512bw_pshuflwv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2596#define HAVE_avx2_pshuflw_1 (TARGET_AVX2 \
2597 && 1 && 1 \
2598 && INTVAL (operands[2]) + 8 == INTVAL (operands[6]) \
2599 && INTVAL (operands[3]) + 8 == INTVAL (operands[7]) \
2600 && INTVAL (operands[4]) + 8 == INTVAL (operands[8]) \
2601 && INTVAL (operands[5]) + 8 == INTVAL (operands[9]))
2602#define HAVE_avx2_pshuflw_1_mask ((TARGET_AVX512F) && (TARGET_AVX2 \
2603 && TARGET_AVX512BW && TARGET_AVX512VL \
2604 && INTVAL (operands[2]) + 8 == INTVAL (operands[6]) \
2605 && INTVAL (operands[3]) + 8 == INTVAL (operands[7]) \
2606 && INTVAL (operands[4]) + 8 == INTVAL (operands[8]) \
2607 && INTVAL (operands[5]) + 8 == INTVAL (operands[9])))
2608#define HAVE_sse2_pshuflw_1 (TARGET_SSE2 && 1 && 1)
2609#define HAVE_sse2_pshuflw_1_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512BW && TARGET_AVX512VL))
2610#define HAVE_avx512bw_pshufhwv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2611#define HAVE_avx2_pshufhw_1 (TARGET_AVX2 \
2612 && 1 && 1 \
2613 && INTVAL (operands[2]) + 8 == INTVAL (operands[6]) \
2614 && INTVAL (operands[3]) + 8 == INTVAL (operands[7]) \
2615 && INTVAL (operands[4]) + 8 == INTVAL (operands[8]) \
2616 && INTVAL (operands[5]) + 8 == INTVAL (operands[9]))
2617#define HAVE_avx2_pshufhw_1_mask ((TARGET_AVX512F) && (TARGET_AVX2 \
2618 && TARGET_AVX512BW && TARGET_AVX512VL \
2619 && INTVAL (operands[2]) + 8 == INTVAL (operands[6]) \
2620 && INTVAL (operands[3]) + 8 == INTVAL (operands[7]) \
2621 && INTVAL (operands[4]) + 8 == INTVAL (operands[8]) \
2622 && INTVAL (operands[5]) + 8 == INTVAL (operands[9])))
2623#define HAVE_sse2_pshufhw_1 (TARGET_SSE2 && 1 && 1)
2624#define HAVE_sse2_pshufhw_1_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512BW && TARGET_AVX512VL))
2625#define HAVE_sse2_loadld (TARGET_SSE)
2626#define HAVE_vec_concatv2di (TARGET_SSE)
2627#define HAVE_avx512f_psadbw ((TARGET_SSE2) && (TARGET_AVX512BW))
2628#define HAVE_avx2_psadbw ((TARGET_SSE2) && (TARGET_AVX2))
2629#define HAVE_sse2_psadbw (TARGET_SSE2)
2630#define HAVE_avx_movmskps256 ((TARGET_SSE) && (TARGET_AVX))
2631#define HAVE_sse_movmskps (TARGET_SSE)
2632#define HAVE_avx_movmskpd256 ((TARGET_SSE) && (TARGET_AVX))
2633#define HAVE_sse2_movmskpd ((TARGET_SSE) && (TARGET_SSE2))
2634#define HAVE_avx2_pmovmskb ((TARGET_SSE2) && (TARGET_AVX2))
2635#define HAVE_sse2_pmovmskb (TARGET_SSE2)
2636#define HAVE_sse_ldmxcsr (TARGET_SSE)
2637#define HAVE_sse_stmxcsr (TARGET_SSE)
2638#define HAVE_sse2_clflush (TARGET_SSE2)
2639#define HAVE_sse3_mwait (TARGET_SSE3)
2640#define HAVE_sse3_monitor_si ((TARGET_SSE3) && (Pmode == SImode))
2641#define HAVE_sse3_monitor_di ((TARGET_SSE3) && (Pmode == DImode))
2642#define HAVE_avx2_phaddwv16hi3 (TARGET_AVX2)
2643#define HAVE_avx2_phaddswv16hi3 (TARGET_AVX2)
2644#define HAVE_avx2_phsubwv16hi3 (TARGET_AVX2)
2645#define HAVE_avx2_phsubswv16hi3 (TARGET_AVX2)
2646#define HAVE_ssse3_phaddwv8hi3 (TARGET_SSSE3)
2647#define HAVE_ssse3_phaddswv8hi3 (TARGET_SSSE3)
2648#define HAVE_ssse3_phsubwv8hi3 (TARGET_SSSE3)
2649#define HAVE_ssse3_phsubswv8hi3 (TARGET_SSSE3)
2650#define HAVE_ssse3_phaddwv4hi3 (TARGET_SSSE3)
2651#define HAVE_ssse3_phaddswv4hi3 (TARGET_SSSE3)
2652#define HAVE_ssse3_phsubwv4hi3 (TARGET_SSSE3)
2653#define HAVE_ssse3_phsubswv4hi3 (TARGET_SSSE3)
2654#define HAVE_avx2_phadddv8si3 (TARGET_AVX2)
2655#define HAVE_avx2_phsubdv8si3 (TARGET_AVX2)
2656#define HAVE_ssse3_phadddv4si3 (TARGET_SSSE3)
2657#define HAVE_ssse3_phsubdv4si3 (TARGET_SSSE3)
2658#define HAVE_ssse3_phadddv2si3 (TARGET_SSSE3)
2659#define HAVE_ssse3_phsubdv2si3 (TARGET_SSSE3)
2660#define HAVE_avx2_pmaddubsw256 (TARGET_AVX2)
2661#define HAVE_avx512bw_pmaddubsw512v8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2662#define HAVE_avx512bw_pmaddubsw512v8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2663#define HAVE_avx512bw_pmaddubsw512v16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2664#define HAVE_avx512bw_pmaddubsw512v16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
2665#define HAVE_avx512bw_pmaddubsw512v32hi (TARGET_AVX512BW)
2666#define HAVE_avx512bw_pmaddubsw512v32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2667#define HAVE_avx512bw_umulhrswv32hi3 (TARGET_AVX512BW)
2668#define HAVE_avx512bw_umulhrswv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2669#define HAVE_ssse3_pmaddubsw128 (TARGET_SSSE3)
2670#define HAVE_ssse3_pmaddubsw (TARGET_SSSE3)
2671#define HAVE_avx512bw_pshufbv64qi3 ((TARGET_SSSE3 && 1 && 1) && (TARGET_AVX512BW))
2672#define HAVE_avx512bw_pshufbv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSSE3 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
2673#define HAVE_avx2_pshufbv32qi3 ((TARGET_SSSE3 && 1 && 1) && (TARGET_AVX2))
2674#define HAVE_avx2_pshufbv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSSE3 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
2675#define HAVE_ssse3_pshufbv16qi3 (TARGET_SSSE3 && 1 && 1)
2676#define HAVE_ssse3_pshufbv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSSE3 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
2677#define HAVE_ssse3_pshufbv8qi3 (TARGET_SSSE3)
2678#define HAVE_avx2_psignv32qi3 ((TARGET_SSSE3) && (TARGET_AVX2))
2679#define HAVE_ssse3_psignv16qi3 (TARGET_SSSE3)
2680#define HAVE_avx2_psignv16hi3 ((TARGET_SSSE3) && (TARGET_AVX2))
2681#define HAVE_ssse3_psignv8hi3 (TARGET_SSSE3)
2682#define HAVE_avx2_psignv8si3 ((TARGET_SSSE3) && (TARGET_AVX2))
2683#define HAVE_ssse3_psignv4si3 (TARGET_SSSE3)
2684#define HAVE_ssse3_psignv8qi3 (TARGET_SSSE3)
2685#define HAVE_ssse3_psignv4hi3 (TARGET_SSSE3)
2686#define HAVE_ssse3_psignv2si3 (TARGET_SSSE3)
2687#define HAVE_avx512bw_palignrv64qi_mask ((TARGET_AVX512BW && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512BW))
2688#define HAVE_avx2_palignrv32qi_mask ((TARGET_AVX512BW && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2))
2689#define HAVE_ssse3_palignrv16qi_mask (TARGET_AVX512BW && (16 == 64 || TARGET_AVX512VL))
2690#define HAVE_avx512bw_palignrv4ti ((TARGET_SSSE3) && (TARGET_AVX512BW))
2691#define HAVE_avx2_palignrv2ti ((TARGET_SSSE3) && (TARGET_AVX2))
2692#define HAVE_ssse3_palignrti (TARGET_SSSE3)
2693#define HAVE_ssse3_palignrdi (TARGET_SSSE3)
2694#define HAVE_absv16si2_mask (TARGET_AVX512F)
2695#define HAVE_absv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
2696#define HAVE_absv4si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
2697#define HAVE_absv8di2_mask (TARGET_AVX512F)
2698#define HAVE_absv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
2699#define HAVE_absv2di2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
2700#define HAVE_absv64qi2_mask (TARGET_AVX512BW)
2701#define HAVE_absv16qi2_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2702#define HAVE_absv32qi2_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2703#define HAVE_absv32hi2_mask (TARGET_AVX512BW)
2704#define HAVE_absv16hi2_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2705#define HAVE_absv8hi2_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
2706#define HAVE_absv8qi2 (TARGET_SSSE3)
2707#define HAVE_absv4hi2 (TARGET_SSSE3)
2708#define HAVE_absv2si2 (TARGET_SSSE3)
2709#define HAVE_sse4a_movntsf (TARGET_SSE4A)
2710#define HAVE_sse4a_movntdf (TARGET_SSE4A)
2711#define HAVE_sse4a_vmmovntv4sf (TARGET_SSE4A)
2712#define HAVE_sse4a_vmmovntv2df ((TARGET_SSE4A) && (TARGET_SSE2))
2713#define HAVE_sse4a_extrqi (TARGET_SSE4A)
2714#define HAVE_sse4a_extrq (TARGET_SSE4A)
2715#define HAVE_sse4a_insertqi (TARGET_SSE4A)
2716#define HAVE_sse4a_insertq (TARGET_SSE4A)
2717#define HAVE_avx_blendps256 ((TARGET_SSE4_1) && (TARGET_AVX))
2718#define HAVE_sse4_1_blendps (TARGET_SSE4_1)
2719#define HAVE_avx_blendpd256 ((TARGET_SSE4_1) && (TARGET_AVX))
2720#define HAVE_sse4_1_blendpd ((TARGET_SSE4_1) && (TARGET_SSE2))
2721#define HAVE_avx_blendvps256 ((TARGET_SSE4_1) && (TARGET_AVX))
2722#define HAVE_sse4_1_blendvps (TARGET_SSE4_1)
2723#define HAVE_avx_blendvpd256 ((TARGET_SSE4_1) && (TARGET_AVX))
2724#define HAVE_sse4_1_blendvpd ((TARGET_SSE4_1) && (TARGET_SSE2))
2725#define HAVE_avx_dpps256 ((TARGET_SSE4_1) && (TARGET_AVX))
2726#define HAVE_sse4_1_dpps (TARGET_SSE4_1)
2727#define HAVE_avx_dppd256 ((TARGET_SSE4_1) && (TARGET_AVX))
2728#define HAVE_sse4_1_dppd ((TARGET_SSE4_1) && (TARGET_SSE2))
2729#define HAVE_avx512f_movntdqa ((TARGET_SSE4_1) && (TARGET_AVX512F))
2730#define HAVE_avx2_movntdqa ((TARGET_SSE4_1) && (TARGET_AVX2))
2731#define HAVE_sse4_1_movntdqa (TARGET_SSE4_1)
2732#define HAVE_avx2_mpsadbw ((TARGET_SSE4_1) && (TARGET_AVX2))
2733#define HAVE_sse4_1_mpsadbw (TARGET_SSE4_1)
2734#define HAVE_avx512bw_packusdw ((TARGET_SSE4_1 && 1 && 1) && (TARGET_AVX512BW))
2735#define HAVE_avx512bw_packusdw_mask ((TARGET_AVX512F) && ((TARGET_SSE4_1 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
2736#define HAVE_avx2_packusdw ((TARGET_SSE4_1 && 1 && 1) && (TARGET_AVX2))
2737#define HAVE_avx2_packusdw_mask ((TARGET_AVX512F) && ((TARGET_SSE4_1 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
2738#define HAVE_sse4_1_packusdw (TARGET_SSE4_1 && 1 && 1)
2739#define HAVE_sse4_1_packusdw_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
2740#define HAVE_avx2_pblendvb ((TARGET_SSE4_1) && (TARGET_AVX2))
2741#define HAVE_sse4_1_pblendvb (TARGET_SSE4_1)
2742#define HAVE_sse4_1_pblendw (TARGET_SSE4_1)
2743#define HAVE_avx2_pblenddv8si (TARGET_AVX2)
2744#define HAVE_avx2_pblenddv4si (TARGET_AVX2)
2745#define HAVE_sse4_1_phminposuw (TARGET_SSE4_1)
2746#define HAVE_avx2_sign_extendv16qiv16hi2 (TARGET_AVX2 && 1 && 1)
2747#define HAVE_avx2_sign_extendv16qiv16hi2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512BW && TARGET_AVX512VL))
2748#define HAVE_avx2_zero_extendv16qiv16hi2 (TARGET_AVX2 && 1 && 1)
2749#define HAVE_avx2_zero_extendv16qiv16hi2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512BW && TARGET_AVX512VL))
2750#define HAVE_avx512bw_sign_extendv32qiv32hi2 (TARGET_AVX512BW)
2751#define HAVE_avx512bw_sign_extendv32qiv32hi2_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2752#define HAVE_avx512bw_zero_extendv32qiv32hi2 (TARGET_AVX512BW)
2753#define HAVE_avx512bw_zero_extendv32qiv32hi2_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
2754#define HAVE_sse4_1_sign_extendv8qiv8hi2 (TARGET_SSE4_1 && 1 && 1)
2755#define HAVE_sse4_1_sign_extendv8qiv8hi2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512BW && TARGET_AVX512VL))
2756#define HAVE_sse4_1_zero_extendv8qiv8hi2 (TARGET_SSE4_1 && 1 && 1)
2757#define HAVE_sse4_1_zero_extendv8qiv8hi2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512BW && TARGET_AVX512VL))
2758#define HAVE_avx512f_sign_extendv16qiv16si2_mask (TARGET_AVX512F)
2759#define HAVE_avx512f_zero_extendv16qiv16si2_mask (TARGET_AVX512F)
2760#define HAVE_avx2_sign_extendv8qiv8si2 (TARGET_AVX2 && 1)
2761#define HAVE_avx2_sign_extendv8qiv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2762#define HAVE_avx2_zero_extendv8qiv8si2 (TARGET_AVX2 && 1)
2763#define HAVE_avx2_zero_extendv8qiv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2764#define HAVE_sse4_1_sign_extendv4qiv4si2 (TARGET_SSE4_1 && 1)
2765#define HAVE_sse4_1_sign_extendv4qiv4si2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
2766#define HAVE_sse4_1_zero_extendv4qiv4si2 (TARGET_SSE4_1 && 1)
2767#define HAVE_sse4_1_zero_extendv4qiv4si2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
2768#define HAVE_avx512f_sign_extendv16hiv16si2 (TARGET_AVX512F)
2769#define HAVE_avx512f_sign_extendv16hiv16si2_mask (TARGET_AVX512F)
2770#define HAVE_avx512f_zero_extendv16hiv16si2 (TARGET_AVX512F)
2771#define HAVE_avx512f_zero_extendv16hiv16si2_mask (TARGET_AVX512F)
2772#define HAVE_avx2_sign_extendv8hiv8si2 (TARGET_AVX2 && 1)
2773#define HAVE_avx2_sign_extendv8hiv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2774#define HAVE_avx2_zero_extendv8hiv8si2 (TARGET_AVX2 && 1)
2775#define HAVE_avx2_zero_extendv8hiv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2776#define HAVE_sse4_1_sign_extendv4hiv4si2 (TARGET_SSE4_1 && 1)
2777#define HAVE_sse4_1_sign_extendv4hiv4si2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
2778#define HAVE_sse4_1_zero_extendv4hiv4si2 (TARGET_SSE4_1 && 1)
2779#define HAVE_sse4_1_zero_extendv4hiv4si2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
2780#define HAVE_avx512f_sign_extendv8qiv8di2 (TARGET_AVX512F)
2781#define HAVE_avx512f_sign_extendv8qiv8di2_mask (TARGET_AVX512F)
2782#define HAVE_avx512f_zero_extendv8qiv8di2 (TARGET_AVX512F)
2783#define HAVE_avx512f_zero_extendv8qiv8di2_mask (TARGET_AVX512F)
2784#define HAVE_avx2_sign_extendv4qiv4di2 (TARGET_AVX2 && 1)
2785#define HAVE_avx2_sign_extendv4qiv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2786#define HAVE_avx2_zero_extendv4qiv4di2 (TARGET_AVX2 && 1)
2787#define HAVE_avx2_zero_extendv4qiv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2788#define HAVE_sse4_1_sign_extendv2qiv2di2 (TARGET_SSE4_1 && 1)
2789#define HAVE_sse4_1_sign_extendv2qiv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
2790#define HAVE_sse4_1_zero_extendv2qiv2di2 (TARGET_SSE4_1 && 1)
2791#define HAVE_sse4_1_zero_extendv2qiv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
2792#define HAVE_avx512f_sign_extendv8hiv8di2 (TARGET_AVX512F)
2793#define HAVE_avx512f_sign_extendv8hiv8di2_mask (TARGET_AVX512F)
2794#define HAVE_avx512f_zero_extendv8hiv8di2 (TARGET_AVX512F)
2795#define HAVE_avx512f_zero_extendv8hiv8di2_mask (TARGET_AVX512F)
2796#define HAVE_avx2_sign_extendv4hiv4di2 (TARGET_AVX2 && 1)
2797#define HAVE_avx2_sign_extendv4hiv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2798#define HAVE_avx2_zero_extendv4hiv4di2 (TARGET_AVX2 && 1)
2799#define HAVE_avx2_zero_extendv4hiv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2800#define HAVE_sse4_1_sign_extendv2hiv2di2 (TARGET_SSE4_1 && 1)
2801#define HAVE_sse4_1_sign_extendv2hiv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
2802#define HAVE_sse4_1_zero_extendv2hiv2di2 (TARGET_SSE4_1 && 1)
2803#define HAVE_sse4_1_zero_extendv2hiv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
2804#define HAVE_avx512f_sign_extendv8siv8di2 (TARGET_AVX512F)
2805#define HAVE_avx512f_sign_extendv8siv8di2_mask (TARGET_AVX512F)
2806#define HAVE_avx512f_zero_extendv8siv8di2 (TARGET_AVX512F)
2807#define HAVE_avx512f_zero_extendv8siv8di2_mask (TARGET_AVX512F)
2808#define HAVE_avx2_sign_extendv4siv4di2 (TARGET_AVX2 && 1)
2809#define HAVE_avx2_sign_extendv4siv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2810#define HAVE_avx2_zero_extendv4siv4di2 (TARGET_AVX2 && 1)
2811#define HAVE_avx2_zero_extendv4siv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
2812#define HAVE_sse4_1_sign_extendv2siv2di2 (TARGET_SSE4_1 && 1)
2813#define HAVE_sse4_1_sign_extendv2siv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
2814#define HAVE_sse4_1_zero_extendv2siv2di2 (TARGET_SSE4_1 && 1)
2815#define HAVE_sse4_1_zero_extendv2siv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
2816#define HAVE_avx_vtestps256 (TARGET_AVX)
2817#define HAVE_avx_vtestps (TARGET_AVX)
2818#define HAVE_avx_vtestpd256 (TARGET_AVX)
2819#define HAVE_avx_vtestpd ((TARGET_AVX) && (TARGET_SSE2))
2820#define HAVE_sse4_1_ptestv16qi (TARGET_SSE4_1)
2821#define HAVE_sse4_1_ptestv8hi (TARGET_SSE4_1)
2822#define HAVE_sse4_1_ptestv4si (TARGET_SSE4_1)
2823#define HAVE_sse4_1_ptestv2di (TARGET_SSE4_1)
2824#define HAVE_sse4_1_ptestv4sf (TARGET_SSE4_1)
2825#define HAVE_sse4_1_ptestv2df (TARGET_SSE4_1)
2826#define HAVE_avx_ptestv32qi ((TARGET_SSE4_1) && (TARGET_AVX))
2827#define HAVE_avx_ptestv16hi ((TARGET_SSE4_1) && (TARGET_AVX))
2828#define HAVE_avx_ptestv8si ((TARGET_SSE4_1) && (TARGET_AVX))
2829#define HAVE_avx_ptestv4di ((TARGET_SSE4_1) && (TARGET_AVX))
2830#define HAVE_avx_ptestv8sf ((TARGET_SSE4_1) && (TARGET_AVX))
2831#define HAVE_avx_ptestv4df ((TARGET_SSE4_1) && (TARGET_AVX))
2832#define HAVE_ptesttf2 (TARGET_SSE4_1)
2833#define HAVE_avx_roundps256 ((TARGET_ROUND) && (TARGET_AVX))
2834#define HAVE_sse4_1_roundps (TARGET_ROUND)
2835#define HAVE_avx_roundpd256 ((TARGET_ROUND) && (TARGET_AVX))
2836#define HAVE_sse4_1_roundpd ((TARGET_ROUND) && (TARGET_SSE2))
2837#define HAVE_sse4_1_roundss (TARGET_ROUND)
2838#define HAVE_sse4_1_roundsd ((TARGET_ROUND) && (TARGET_SSE2))
2839#define HAVE_sse4_2_pcmpestr (TARGET_SSE4_2 \
2840 && can_create_pseudo_p ())
2841#define HAVE_sse4_2_pcmpestri (TARGET_SSE4_2)
2842#define HAVE_sse4_2_pcmpestrm (TARGET_SSE4_2)
2843#define HAVE_sse4_2_pcmpestr_cconly (TARGET_SSE4_2)
2844#define HAVE_sse4_2_pcmpistr (TARGET_SSE4_2 \
2845 && can_create_pseudo_p ())
2846#define HAVE_sse4_2_pcmpistri (TARGET_SSE4_2)
2847#define HAVE_sse4_2_pcmpistrm (TARGET_SSE4_2)
2848#define HAVE_sse4_2_pcmpistr_cconly (TARGET_SSE4_2)
2849#define HAVE_avx512er_exp2v16sf (TARGET_AVX512ER)
2850#define HAVE_avx512er_exp2v16sf_round ((TARGET_AVX512F) && (TARGET_AVX512ER))
2851#define HAVE_avx512er_exp2v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
2852#define HAVE_avx512er_exp2v16sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
2853#define HAVE_avx512er_exp2v8df (TARGET_AVX512ER)
2854#define HAVE_avx512er_exp2v8df_round ((TARGET_AVX512F) && (TARGET_AVX512ER))
2855#define HAVE_avx512er_exp2v8df_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
2856#define HAVE_avx512er_exp2v8df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
2857#define HAVE_avx512er_rcp28v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
2858#define HAVE_avx512er_rcp28v16sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
2859#define HAVE_avx512er_rcp28v8df_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
2860#define HAVE_avx512er_rcp28v8df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
2861#define HAVE_avx512er_vmrcp28v4sf (TARGET_AVX512ER)
2862#define HAVE_avx512er_vmrcp28v4sf_round ((TARGET_AVX512F) && (TARGET_AVX512ER))
2863#define HAVE_avx512er_vmrcp28v2df ((TARGET_AVX512ER) && (TARGET_SSE2))
2864#define HAVE_avx512er_vmrcp28v2df_round ((TARGET_AVX512F) && ((TARGET_AVX512ER) && (TARGET_SSE2)))
2865#define HAVE_avx512er_rsqrt28v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
2866#define HAVE_avx512er_rsqrt28v16sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
2867#define HAVE_avx512er_rsqrt28v8df_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
2868#define HAVE_avx512er_rsqrt28v8df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
2869#define HAVE_avx512er_vmrsqrt28v4sf (TARGET_AVX512ER)
2870#define HAVE_avx512er_vmrsqrt28v4sf_round ((TARGET_AVX512F) && (TARGET_AVX512ER))
2871#define HAVE_avx512er_vmrsqrt28v2df ((TARGET_AVX512ER) && (TARGET_SSE2))
2872#define HAVE_avx512er_vmrsqrt28v2df_round ((TARGET_AVX512F) && ((TARGET_AVX512ER) && (TARGET_SSE2)))
2873#define HAVE_xop_pmacsww (TARGET_XOP)
2874#define HAVE_xop_pmacssww (TARGET_XOP)
2875#define HAVE_xop_pmacsdd (TARGET_XOP)
2876#define HAVE_xop_pmacssdd (TARGET_XOP)
2877#define HAVE_xop_pmacsdql (TARGET_XOP)
2878#define HAVE_xop_pmacssdql (TARGET_XOP)
2879#define HAVE_xop_pmacsdqh (TARGET_XOP)
2880#define HAVE_xop_pmacssdqh (TARGET_XOP)
2881#define HAVE_xop_pmacswd (TARGET_XOP)
2882#define HAVE_xop_pmacsswd (TARGET_XOP)
2883#define HAVE_xop_pmadcswd (TARGET_XOP)
2884#define HAVE_xop_pmadcsswd (TARGET_XOP)
2885#define HAVE_xop_pcmov_v32qi256 ((TARGET_XOP) && (TARGET_AVX))
2886#define HAVE_xop_pcmov_v16qi (TARGET_XOP)
2887#define HAVE_xop_pcmov_v16hi256 ((TARGET_XOP) && (TARGET_AVX))
2888#define HAVE_xop_pcmov_v8hi (TARGET_XOP)
2889#define HAVE_xop_pcmov_v16si512 ((TARGET_XOP) && (TARGET_AVX512F))
2890#define HAVE_xop_pcmov_v8si256 ((TARGET_XOP) && (TARGET_AVX))
2891#define HAVE_xop_pcmov_v4si (TARGET_XOP)
2892#define HAVE_xop_pcmov_v8di512 ((TARGET_XOP) && (TARGET_AVX512F))
2893#define HAVE_xop_pcmov_v4di256 ((TARGET_XOP) && (TARGET_AVX))
2894#define HAVE_xop_pcmov_v2di (TARGET_XOP)
2895#define HAVE_xop_pcmov_v16sf512 ((TARGET_XOP) && (TARGET_AVX512F))
2896#define HAVE_xop_pcmov_v8sf256 ((TARGET_XOP) && (TARGET_AVX))
2897#define HAVE_xop_pcmov_v4sf (TARGET_XOP)
2898#define HAVE_xop_pcmov_v8df512 ((TARGET_XOP) && (TARGET_AVX512F))
2899#define HAVE_xop_pcmov_v4df256 ((TARGET_XOP) && (TARGET_AVX))
2900#define HAVE_xop_pcmov_v2df ((TARGET_XOP) && (TARGET_SSE2))
2901#define HAVE_xop_phaddbw (TARGET_XOP)
2902#define HAVE_xop_phaddubw (TARGET_XOP)
2903#define HAVE_xop_phaddbd (TARGET_XOP)
2904#define HAVE_xop_phaddubd (TARGET_XOP)
2905#define HAVE_xop_phaddbq (TARGET_XOP)
2906#define HAVE_xop_phaddubq (TARGET_XOP)
2907#define HAVE_xop_phaddwd (TARGET_XOP)
2908#define HAVE_xop_phadduwd (TARGET_XOP)
2909#define HAVE_xop_phaddwq (TARGET_XOP)
2910#define HAVE_xop_phadduwq (TARGET_XOP)
2911#define HAVE_xop_phadddq (TARGET_XOP)
2912#define HAVE_xop_phaddudq (TARGET_XOP)
2913#define HAVE_xop_phsubbw (TARGET_XOP)
2914#define HAVE_xop_phsubwd (TARGET_XOP)
2915#define HAVE_xop_phsubdq (TARGET_XOP)
2916#define HAVE_xop_pperm (TARGET_XOP && !(MEM_P (operands[2]) && MEM_P (operands[3])))
2917#define HAVE_xop_pperm_pack_v2di_v4si (TARGET_XOP && !(MEM_P (operands[2]) && MEM_P (operands[3])))
2918#define HAVE_xop_pperm_pack_v4si_v8hi (TARGET_XOP && !(MEM_P (operands[2]) && MEM_P (operands[3])))
2919#define HAVE_xop_pperm_pack_v8hi_v16qi (TARGET_XOP && !(MEM_P (operands[2]) && MEM_P (operands[3])))
2920#define HAVE_xop_rotlv16qi3 (TARGET_XOP)
2921#define HAVE_xop_rotlv8hi3 (TARGET_XOP)
2922#define HAVE_xop_rotlv4si3 (TARGET_XOP)
2923#define HAVE_xop_rotlv2di3 (TARGET_XOP)
2924#define HAVE_xop_rotrv16qi3 (TARGET_XOP)
2925#define HAVE_xop_rotrv8hi3 (TARGET_XOP)
2926#define HAVE_xop_rotrv4si3 (TARGET_XOP)
2927#define HAVE_xop_rotrv2di3 (TARGET_XOP)
2928#define HAVE_xop_vrotlv16qi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2929#define HAVE_xop_vrotlv8hi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2930#define HAVE_xop_vrotlv4si3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2931#define HAVE_xop_vrotlv2di3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2932#define HAVE_xop_shav16qi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2933#define HAVE_xop_shav8hi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2934#define HAVE_xop_shav4si3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2935#define HAVE_xop_shav2di3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2936#define HAVE_xop_shlv16qi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2937#define HAVE_xop_shlv8hi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2938#define HAVE_xop_shlv4si3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2939#define HAVE_xop_shlv2di3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
2940#define HAVE_xop_frczsf2 (TARGET_XOP)
2941#define HAVE_xop_frczdf2 (TARGET_XOP)
2942#define HAVE_xop_frczv4sf2 (TARGET_XOP)
2943#define HAVE_xop_frczv2df2 (TARGET_XOP)
2944#define HAVE_xop_frczv8sf2 (TARGET_XOP)
2945#define HAVE_xop_frczv4df2 (TARGET_XOP)
2946#define HAVE_xop_maskcmpv16qi3 (TARGET_XOP)
2947#define HAVE_xop_maskcmpv8hi3 (TARGET_XOP)
2948#define HAVE_xop_maskcmpv4si3 (TARGET_XOP)
2949#define HAVE_xop_maskcmpv2di3 (TARGET_XOP)
2950#define HAVE_xop_maskcmp_unsv16qi3 (TARGET_XOP)
2951#define HAVE_xop_maskcmp_unsv8hi3 (TARGET_XOP)
2952#define HAVE_xop_maskcmp_unsv4si3 (TARGET_XOP)
2953#define HAVE_xop_maskcmp_unsv2di3 (TARGET_XOP)
2954#define HAVE_xop_maskcmp_uns2v16qi3 (TARGET_XOP)
2955#define HAVE_xop_maskcmp_uns2v8hi3 (TARGET_XOP)
2956#define HAVE_xop_maskcmp_uns2v4si3 (TARGET_XOP)
2957#define HAVE_xop_maskcmp_uns2v2di3 (TARGET_XOP)
2958#define HAVE_xop_pcom_tfv16qi3 (TARGET_XOP)
2959#define HAVE_xop_pcom_tfv8hi3 (TARGET_XOP)
2960#define HAVE_xop_pcom_tfv4si3 (TARGET_XOP)
2961#define HAVE_xop_pcom_tfv2di3 (TARGET_XOP)
2962#define HAVE_xop_vpermil2v8sf3 ((TARGET_XOP) && (TARGET_AVX))
2963#define HAVE_xop_vpermil2v4sf3 (TARGET_XOP)
2964#define HAVE_xop_vpermil2v4df3 ((TARGET_XOP) && (TARGET_AVX))
2965#define HAVE_xop_vpermil2v2df3 ((TARGET_XOP) && (TARGET_SSE2))
2966#define HAVE_aesenc (TARGET_AES)
2967#define HAVE_aesenclast (TARGET_AES)
2968#define HAVE_aesdec (TARGET_AES)
2969#define HAVE_aesdeclast (TARGET_AES)
2970#define HAVE_aesimc (TARGET_AES)
2971#define HAVE_aeskeygenassist (TARGET_AES)
2972#define HAVE_pclmulqdq (TARGET_PCLMUL)
2973#define HAVE_avx_vzeroupper (TARGET_AVX)
2974#define HAVE_avx2_pbroadcastv16si ((TARGET_AVX2) && (TARGET_AVX512F))
2975#define HAVE_avx2_pbroadcastv8di ((TARGET_AVX2) && (TARGET_AVX512F))
2976#define HAVE_avx2_pbroadcastv64qi ((TARGET_AVX2) && (TARGET_AVX512BW))
2977#define HAVE_avx2_pbroadcastv32qi ((TARGET_AVX2) && (TARGET_AVX))
2978#define HAVE_avx2_pbroadcastv16qi (TARGET_AVX2)
2979#define HAVE_avx2_pbroadcastv32hi ((TARGET_AVX2) && (TARGET_AVX512BW))
2980#define HAVE_avx2_pbroadcastv16hi ((TARGET_AVX2) && (TARGET_AVX))
2981#define HAVE_avx2_pbroadcastv8hi (TARGET_AVX2)
2982#define HAVE_avx2_pbroadcastv8si ((TARGET_AVX2) && (TARGET_AVX))
2983#define HAVE_avx2_pbroadcastv4si (TARGET_AVX2)
2984#define HAVE_avx2_pbroadcastv4di ((TARGET_AVX2) && (TARGET_AVX))
2985#define HAVE_avx2_pbroadcastv2di (TARGET_AVX2)
2986#define HAVE_avx2_pbroadcastv32qi_1 (TARGET_AVX2)
2987#define HAVE_avx2_pbroadcastv16hi_1 (TARGET_AVX2)
2988#define HAVE_avx2_pbroadcastv8si_1 (TARGET_AVX2)
2989#define HAVE_avx2_pbroadcastv4di_1 (TARGET_AVX2)
2990#define HAVE_avx2_permvarv8si (TARGET_AVX2 && 1)
2991#define HAVE_avx2_permvarv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
2992#define HAVE_avx2_permvarv8sf (TARGET_AVX2 && 1)
2993#define HAVE_avx2_permvarv8sf_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
2994#define HAVE_avx512f_permvarv16si ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
2995#define HAVE_avx512f_permvarv16si_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
2996#define HAVE_avx512f_permvarv16sf ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
2997#define HAVE_avx512f_permvarv16sf_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
2998#define HAVE_avx512f_permvarv8di ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
2999#define HAVE_avx512f_permvarv8di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
3000#define HAVE_avx512f_permvarv8df ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
3001#define HAVE_avx512f_permvarv8df_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
3002#define HAVE_avx2_permvarv4di ((TARGET_AVX2 && 1) && (TARGET_AVX512VL))
3003#define HAVE_avx2_permvarv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
3004#define HAVE_avx2_permvarv4df ((TARGET_AVX2 && 1) && (TARGET_AVX512VL))
3005#define HAVE_avx2_permvarv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
3006#define HAVE_avx512bw_permvarv64qi (TARGET_AVX512VBMI && 1)
3007#define HAVE_avx512bw_permvarv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512VBMI && (64 == 64 || TARGET_AVX512VL)))
3008#define HAVE_avx512vl_permvarv16qi ((TARGET_AVX512VBMI && 1) && (TARGET_AVX512VL))
3009#define HAVE_avx512vl_permvarv16qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512VBMI && (16 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
3010#define HAVE_avx512vl_permvarv32qi ((TARGET_AVX512VBMI && 1) && (TARGET_AVX512VL))
3011#define HAVE_avx512vl_permvarv32qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512VBMI && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
3012#define HAVE_avx512vl_permvarv8hi ((TARGET_AVX512BW && 1) && (TARGET_AVX512VL))
3013#define HAVE_avx512vl_permvarv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW && (16 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
3014#define HAVE_avx512vl_permvarv16hi ((TARGET_AVX512BW && 1) && (TARGET_AVX512VL))
3015#define HAVE_avx512vl_permvarv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
3016#define HAVE_avx512bw_permvarv32hi (TARGET_AVX512BW && 1)
3017#define HAVE_avx512bw_permvarv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW && (64 == 64 || TARGET_AVX512VL)))
3018#define HAVE_avx2_permv4di_1 (TARGET_AVX2 && 1)
3019#define HAVE_avx2_permv4di_1_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
3020#define HAVE_avx2_permv4df_1 (TARGET_AVX2 && 1)
3021#define HAVE_avx2_permv4df_1_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
3022#define HAVE_avx512f_permv8df_1 (TARGET_AVX512F && 1 \
3023 && (INTVAL (operands[2]) == (INTVAL (operands[6]) - 4) \
3024 && INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
3025 && INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
3026 && INTVAL (operands[5]) == (INTVAL (operands[9]) - 4)))
3027#define HAVE_avx512f_permv8df_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) \
3028 && (INTVAL (operands[2]) == (INTVAL (operands[6]) - 4) \
3029 && INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
3030 && INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
3031 && INTVAL (operands[5]) == (INTVAL (operands[9]) - 4))))
3032#define HAVE_avx512f_permv8di_1 (TARGET_AVX512F && 1 \
3033 && (INTVAL (operands[2]) == (INTVAL (operands[6]) - 4) \
3034 && INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
3035 && INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
3036 && INTVAL (operands[5]) == (INTVAL (operands[9]) - 4)))
3037#define HAVE_avx512f_permv8di_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) \
3038 && (INTVAL (operands[2]) == (INTVAL (operands[6]) - 4) \
3039 && INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
3040 && INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
3041 && INTVAL (operands[5]) == (INTVAL (operands[9]) - 4))))
3042#define HAVE_avx2_permv2ti (TARGET_AVX2)
3043#define HAVE_avx2_vec_dupv4df (TARGET_AVX2)
3044#define HAVE_avx512f_vec_dupv16si_1 (TARGET_AVX512F)
3045#define HAVE_avx512f_vec_dupv8di_1 (TARGET_AVX512F)
3046#define HAVE_avx512bw_vec_dupv32hi_1 ((TARGET_AVX512F) && (TARGET_AVX512BW))
3047#define HAVE_avx512bw_vec_dupv64qi_1 ((TARGET_AVX512F) && (TARGET_AVX512BW))
3048#define HAVE_avx512f_vec_dupv16si (TARGET_AVX512F)
3049#define HAVE_avx512f_vec_dupv16si_mask (TARGET_AVX512F)
3050#define HAVE_avx512vl_vec_dupv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
3051#define HAVE_avx512vl_vec_dupv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3052#define HAVE_avx512vl_vec_dupv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
3053#define HAVE_avx512vl_vec_dupv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3054#define HAVE_avx512f_vec_dupv8di (TARGET_AVX512F)
3055#define HAVE_avx512f_vec_dupv8di_mask (TARGET_AVX512F)
3056#define HAVE_avx512vl_vec_dupv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
3057#define HAVE_avx512vl_vec_dupv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3058#define HAVE_avx512vl_vec_dupv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
3059#define HAVE_avx512vl_vec_dupv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3060#define HAVE_avx512f_vec_dupv16sf (TARGET_AVX512F)
3061#define HAVE_avx512f_vec_dupv16sf_mask (TARGET_AVX512F)
3062#define HAVE_avx512vl_vec_dupv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
3063#define HAVE_avx512vl_vec_dupv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3064#define HAVE_avx512vl_vec_dupv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
3065#define HAVE_avx512vl_vec_dupv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3066#define HAVE_avx512f_vec_dupv8df (TARGET_AVX512F)
3067#define HAVE_avx512f_vec_dupv8df_mask (TARGET_AVX512F)
3068#define HAVE_avx512vl_vec_dupv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
3069#define HAVE_avx512vl_vec_dupv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3070#define HAVE_avx512vl_vec_dupv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
3071#define HAVE_avx512vl_vec_dupv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3072#define HAVE_avx512bw_vec_dupv64qi (TARGET_AVX512BW)
3073#define HAVE_avx512bw_vec_dupv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
3074#define HAVE_avx512vl_vec_dupv16qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3075#define HAVE_avx512vl_vec_dupv16qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3076#define HAVE_avx512vl_vec_dupv32qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3077#define HAVE_avx512vl_vec_dupv32qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3078#define HAVE_avx512bw_vec_dupv32hi (TARGET_AVX512BW)
3079#define HAVE_avx512bw_vec_dupv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
3080#define HAVE_avx512vl_vec_dupv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3081#define HAVE_avx512vl_vec_dupv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3082#define HAVE_avx512vl_vec_dupv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3083#define HAVE_avx512vl_vec_dupv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3084#define HAVE_avx512f_broadcastv16sf_mask (TARGET_AVX512F)
3085#define HAVE_avx512f_broadcastv16si_mask (TARGET_AVX512F)
3086#define HAVE_avx512f_broadcastv8df_mask (TARGET_AVX512F)
3087#define HAVE_avx512f_broadcastv8di_mask (TARGET_AVX512F)
3088#define HAVE_avx512bw_vec_dup_gprv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
3089#define HAVE_avx512vl_vec_dup_gprv16qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3090#define HAVE_avx512vl_vec_dup_gprv32qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3091#define HAVE_avx512bw_vec_dup_gprv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
3092#define HAVE_avx512vl_vec_dup_gprv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3093#define HAVE_avx512vl_vec_dup_gprv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3094#define HAVE_avx512f_vec_dup_gprv16si_mask (TARGET_AVX512F)
3095#define HAVE_avx512vl_vec_dup_gprv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3096#define HAVE_avx512vl_vec_dup_gprv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3097#define HAVE_avx512f_vec_dup_gprv8di_mask (TARGET_AVX512F)
3098#define HAVE_avx512vl_vec_dup_gprv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3099#define HAVE_avx512vl_vec_dup_gprv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3100#define HAVE_avx512f_vec_dup_gprv16sf_mask (TARGET_AVX512F)
3101#define HAVE_avx512vl_vec_dup_gprv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3102#define HAVE_avx512vl_vec_dup_gprv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3103#define HAVE_avx512f_vec_dup_gprv8df_mask (TARGET_AVX512F)
3104#define HAVE_avx512vl_vec_dup_gprv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3105#define HAVE_avx512vl_vec_dup_gprv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3106#define HAVE_vec_dupv4sf (TARGET_SSE)
3107#define HAVE_avx2_vbroadcasti128_v32qi (TARGET_AVX2)
3108#define HAVE_avx2_vbroadcasti128_v16hi (TARGET_AVX2)
3109#define HAVE_avx2_vbroadcasti128_v8si (TARGET_AVX2)
3110#define HAVE_avx2_vbroadcasti128_v4di (TARGET_AVX2)
3111#define HAVE_vec_dupv8si (TARGET_AVX)
3112#define HAVE_vec_dupv8sf (TARGET_AVX)
3113#define HAVE_vec_dupv4di (TARGET_AVX)
3114#define HAVE_vec_dupv4df (TARGET_AVX)
3115#define HAVE_avx_vbroadcastf128_v32qi (TARGET_AVX)
3116#define HAVE_avx_vbroadcastf128_v16hi (TARGET_AVX)
3117#define HAVE_avx_vbroadcastf128_v8si (TARGET_AVX)
3118#define HAVE_avx_vbroadcastf128_v4di (TARGET_AVX)
3119#define HAVE_avx_vbroadcastf128_v8sf (TARGET_AVX)
3120#define HAVE_avx_vbroadcastf128_v4df (TARGET_AVX)
3121#define HAVE_avx512dq_broadcastv16si_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
3122#define HAVE_avx512dq_broadcastv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
3123#define HAVE_avx512dq_broadcastv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
3124#define HAVE_avx512dq_broadcastv16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
3125#define HAVE_avx512dq_broadcastv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
3126#define HAVE_avx512vl_broadcastv8si_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3127#define HAVE_avx512vl_broadcastv8sf_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3128#define HAVE_avx512dq_broadcastv16sf_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512DQ))
3129#define HAVE_avx512dq_broadcastv16si_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512DQ))
3130#define HAVE_avx512dq_broadcastv8di_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512DQ))
3131#define HAVE_avx512dq_broadcastv8df_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512DQ))
3132#define HAVE_avx512dq_broadcastv4di_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
3133#define HAVE_avx512dq_broadcastv4df_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
3134#define HAVE_avx512cd_maskb_vec_dupv8di (TARGET_AVX512CD)
3135#define HAVE_avx512cd_maskb_vec_dupv4di ((TARGET_AVX512CD) && (TARGET_AVX512VL))
3136#define HAVE_avx512cd_maskb_vec_dupv2di ((TARGET_AVX512CD) && (TARGET_AVX512VL))
3137#define HAVE_avx512cd_maskw_vec_dupv16si (TARGET_AVX512CD)
3138#define HAVE_avx512cd_maskw_vec_dupv8si ((TARGET_AVX512CD) && (TARGET_AVX512VL))
3139#define HAVE_avx512cd_maskw_vec_dupv4si ((TARGET_AVX512CD) && (TARGET_AVX512VL))
3140#define HAVE_avx512f_vpermilvarv16sf3 ((TARGET_AVX && 1) && (TARGET_AVX512F))
3141#define HAVE_avx512f_vpermilvarv16sf3_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
3142#define HAVE_avx_vpermilvarv8sf3 ((TARGET_AVX && 1) && (TARGET_AVX))
3143#define HAVE_avx_vpermilvarv8sf3_mask ((TARGET_AVX512F) && ((TARGET_AVX && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
3144#define HAVE_avx_vpermilvarv4sf3 (TARGET_AVX && 1)
3145#define HAVE_avx_vpermilvarv4sf3_mask ((TARGET_AVX512F) && (TARGET_AVX && (16 == 64 || TARGET_AVX512VL)))
3146#define HAVE_avx512f_vpermilvarv8df3 ((TARGET_AVX && 1) && (TARGET_AVX512F))
3147#define HAVE_avx512f_vpermilvarv8df3_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
3148#define HAVE_avx_vpermilvarv4df3 ((TARGET_AVX && 1) && (TARGET_AVX))
3149#define HAVE_avx_vpermilvarv4df3_mask ((TARGET_AVX512F) && ((TARGET_AVX && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
3150#define HAVE_avx_vpermilvarv2df3 ((TARGET_AVX && 1) && (TARGET_SSE2))
3151#define HAVE_avx_vpermilvarv2df3_mask ((TARGET_AVX512F) && ((TARGET_AVX && (16 == 64 || TARGET_AVX512VL)) && (TARGET_SSE2)))
3152#define HAVE_avx512f_vpermi2varv16si3 (TARGET_AVX512F)
3153#define HAVE_avx512f_vpermi2varv16si3_maskz_1 (TARGET_AVX512F)
3154#define HAVE_avx512f_vpermi2varv16sf3 (TARGET_AVX512F)
3155#define HAVE_avx512f_vpermi2varv16sf3_maskz_1 (TARGET_AVX512F)
3156#define HAVE_avx512f_vpermi2varv8di3 (TARGET_AVX512F)
3157#define HAVE_avx512f_vpermi2varv8di3_maskz_1 (TARGET_AVX512F)
3158#define HAVE_avx512f_vpermi2varv8df3 (TARGET_AVX512F)
3159#define HAVE_avx512f_vpermi2varv8df3_maskz_1 (TARGET_AVX512F)
3160#define HAVE_avx512vl_vpermi2varv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3161#define HAVE_avx512vl_vpermi2varv8si3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3162#define HAVE_avx512vl_vpermi2varv8sf3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3163#define HAVE_avx512vl_vpermi2varv8sf3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3164#define HAVE_avx512vl_vpermi2varv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3165#define HAVE_avx512vl_vpermi2varv4di3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3166#define HAVE_avx512vl_vpermi2varv4df3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3167#define HAVE_avx512vl_vpermi2varv4df3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3168#define HAVE_avx512vl_vpermi2varv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3169#define HAVE_avx512vl_vpermi2varv4si3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3170#define HAVE_avx512vl_vpermi2varv4sf3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3171#define HAVE_avx512vl_vpermi2varv4sf3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3172#define HAVE_avx512vl_vpermi2varv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3173#define HAVE_avx512vl_vpermi2varv2di3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3174#define HAVE_avx512vl_vpermi2varv2df3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3175#define HAVE_avx512vl_vpermi2varv2df3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3176#define HAVE_avx512bw_vpermi2varv64qi3 (TARGET_AVX512VBMI)
3177#define HAVE_avx512bw_vpermi2varv64qi3_maskz_1 (TARGET_AVX512VBMI)
3178#define HAVE_avx512vl_vpermi2varv16qi3 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3179#define HAVE_avx512vl_vpermi2varv16qi3_maskz_1 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3180#define HAVE_avx512vl_vpermi2varv32qi3 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3181#define HAVE_avx512vl_vpermi2varv32qi3_maskz_1 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3182#define HAVE_avx512vl_vpermi2varv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3183#define HAVE_avx512vl_vpermi2varv8hi3_maskz_1 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3184#define HAVE_avx512vl_vpermi2varv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3185#define HAVE_avx512vl_vpermi2varv16hi3_maskz_1 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3186#define HAVE_avx512bw_vpermi2varv32hi3 (TARGET_AVX512BW)
3187#define HAVE_avx512bw_vpermi2varv32hi3_maskz_1 (TARGET_AVX512BW)
3188#define HAVE_avx512f_vpermi2varv16si3_mask (TARGET_AVX512F)
3189#define HAVE_avx512f_vpermi2varv16sf3_mask (TARGET_AVX512F)
3190#define HAVE_avx512f_vpermi2varv8di3_mask (TARGET_AVX512F)
3191#define HAVE_avx512f_vpermi2varv8df3_mask (TARGET_AVX512F)
3192#define HAVE_avx512vl_vpermi2varv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3193#define HAVE_avx512vl_vpermi2varv8sf3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3194#define HAVE_avx512vl_vpermi2varv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3195#define HAVE_avx512vl_vpermi2varv4df3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3196#define HAVE_avx512vl_vpermi2varv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3197#define HAVE_avx512vl_vpermi2varv4sf3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3198#define HAVE_avx512vl_vpermi2varv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3199#define HAVE_avx512vl_vpermi2varv2df3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3200#define HAVE_avx512bw_vpermi2varv64qi3_mask (TARGET_AVX512VBMI)
3201#define HAVE_avx512vl_vpermi2varv16qi3_mask ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3202#define HAVE_avx512vl_vpermi2varv32qi3_mask ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3203#define HAVE_avx512vl_vpermi2varv8hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3204#define HAVE_avx512vl_vpermi2varv16hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3205#define HAVE_avx512bw_vpermi2varv32hi3_mask (TARGET_AVX512BW)
3206#define HAVE_avx512f_vpermt2varv16si3 (TARGET_AVX512F)
3207#define HAVE_avx512f_vpermt2varv16si3_maskz_1 (TARGET_AVX512F)
3208#define HAVE_avx512f_vpermt2varv16sf3 (TARGET_AVX512F)
3209#define HAVE_avx512f_vpermt2varv16sf3_maskz_1 (TARGET_AVX512F)
3210#define HAVE_avx512f_vpermt2varv8di3 (TARGET_AVX512F)
3211#define HAVE_avx512f_vpermt2varv8di3_maskz_1 (TARGET_AVX512F)
3212#define HAVE_avx512f_vpermt2varv8df3 (TARGET_AVX512F)
3213#define HAVE_avx512f_vpermt2varv8df3_maskz_1 (TARGET_AVX512F)
3214#define HAVE_avx512vl_vpermt2varv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3215#define HAVE_avx512vl_vpermt2varv8si3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3216#define HAVE_avx512vl_vpermt2varv8sf3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3217#define HAVE_avx512vl_vpermt2varv8sf3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3218#define HAVE_avx512vl_vpermt2varv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3219#define HAVE_avx512vl_vpermt2varv4di3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3220#define HAVE_avx512vl_vpermt2varv4df3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3221#define HAVE_avx512vl_vpermt2varv4df3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3222#define HAVE_avx512vl_vpermt2varv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3223#define HAVE_avx512vl_vpermt2varv4si3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3224#define HAVE_avx512vl_vpermt2varv4sf3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3225#define HAVE_avx512vl_vpermt2varv4sf3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3226#define HAVE_avx512vl_vpermt2varv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3227#define HAVE_avx512vl_vpermt2varv2di3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3228#define HAVE_avx512vl_vpermt2varv2df3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3229#define HAVE_avx512vl_vpermt2varv2df3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
3230#define HAVE_avx512bw_vpermt2varv64qi3 (TARGET_AVX512VBMI)
3231#define HAVE_avx512bw_vpermt2varv64qi3_maskz_1 (TARGET_AVX512VBMI)
3232#define HAVE_avx512vl_vpermt2varv16qi3 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3233#define HAVE_avx512vl_vpermt2varv16qi3_maskz_1 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3234#define HAVE_avx512vl_vpermt2varv32qi3 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3235#define HAVE_avx512vl_vpermt2varv32qi3_maskz_1 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3236#define HAVE_avx512vl_vpermt2varv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3237#define HAVE_avx512vl_vpermt2varv8hi3_maskz_1 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3238#define HAVE_avx512vl_vpermt2varv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3239#define HAVE_avx512vl_vpermt2varv16hi3_maskz_1 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3240#define HAVE_avx512bw_vpermt2varv32hi3 (TARGET_AVX512BW)
3241#define HAVE_avx512bw_vpermt2varv32hi3_maskz_1 (TARGET_AVX512BW)
3242#define HAVE_avx512f_vpermt2varv16si3_mask (TARGET_AVX512F)
3243#define HAVE_avx512f_vpermt2varv16sf3_mask (TARGET_AVX512F)
3244#define HAVE_avx512f_vpermt2varv8di3_mask (TARGET_AVX512F)
3245#define HAVE_avx512f_vpermt2varv8df3_mask (TARGET_AVX512F)
3246#define HAVE_avx512vl_vpermt2varv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3247#define HAVE_avx512vl_vpermt2varv8sf3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3248#define HAVE_avx512vl_vpermt2varv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3249#define HAVE_avx512vl_vpermt2varv4df3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3250#define HAVE_avx512vl_vpermt2varv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3251#define HAVE_avx512vl_vpermt2varv4sf3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3252#define HAVE_avx512vl_vpermt2varv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3253#define HAVE_avx512vl_vpermt2varv2df3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3254#define HAVE_avx512bw_vpermt2varv64qi3_mask (TARGET_AVX512VBMI)
3255#define HAVE_avx512vl_vpermt2varv16qi3_mask ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3256#define HAVE_avx512vl_vpermt2varv32qi3_mask ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3257#define HAVE_avx512vl_vpermt2varv8hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3258#define HAVE_avx512vl_vpermt2varv16hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3259#define HAVE_avx512bw_vpermt2varv32hi3_mask (TARGET_AVX512BW)
3260#define HAVE_vec_set_lo_v4di (TARGET_AVX && 1)
3261#define HAVE_vec_set_lo_v4di_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512DQ))
3262#define HAVE_vec_set_lo_v4df (TARGET_AVX && 1)
3263#define HAVE_vec_set_lo_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512DQ))
3264#define HAVE_vec_set_hi_v4di (TARGET_AVX && 1)
3265#define HAVE_vec_set_hi_v4di_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512DQ))
3266#define HAVE_vec_set_hi_v4df (TARGET_AVX && 1)
3267#define HAVE_vec_set_hi_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512DQ))
3268#define HAVE_vec_set_lo_v8si (TARGET_AVX)
3269#define HAVE_vec_set_lo_v8si_mask ((TARGET_AVX512F) && (TARGET_AVX))
3270#define HAVE_vec_set_lo_v8sf (TARGET_AVX)
3271#define HAVE_vec_set_lo_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX))
3272#define HAVE_vec_set_hi_v8si (TARGET_AVX)
3273#define HAVE_vec_set_hi_v8si_mask ((TARGET_AVX512F) && (TARGET_AVX))
3274#define HAVE_vec_set_hi_v8sf (TARGET_AVX)
3275#define HAVE_vec_set_hi_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX))
3276#define HAVE_vec_set_lo_v16hi (TARGET_AVX)
3277#define HAVE_vec_set_hi_v16hi (TARGET_AVX)
3278#define HAVE_vec_set_lo_v32qi (TARGET_AVX)
3279#define HAVE_vec_set_hi_v32qi (TARGET_AVX)
3280#define HAVE_avx_maskloadps (TARGET_AVX)
3281#define HAVE_avx_maskloadpd (TARGET_AVX)
3282#define HAVE_avx_maskloadps256 (TARGET_AVX)
3283#define HAVE_avx_maskloadpd256 (TARGET_AVX)
3284#define HAVE_avx2_maskloadd ((TARGET_AVX) && (TARGET_AVX2))
3285#define HAVE_avx2_maskloadq ((TARGET_AVX) && (TARGET_AVX2))
3286#define HAVE_avx2_maskloadd256 ((TARGET_AVX) && (TARGET_AVX2))
3287#define HAVE_avx2_maskloadq256 ((TARGET_AVX) && (TARGET_AVX2))
3288#define HAVE_avx_maskstoreps (TARGET_AVX)
3289#define HAVE_avx_maskstorepd (TARGET_AVX)
3290#define HAVE_avx_maskstoreps256 (TARGET_AVX)
3291#define HAVE_avx_maskstorepd256 (TARGET_AVX)
3292#define HAVE_avx2_maskstored ((TARGET_AVX) && (TARGET_AVX2))
3293#define HAVE_avx2_maskstoreq ((TARGET_AVX) && (TARGET_AVX2))
3294#define HAVE_avx2_maskstored256 ((TARGET_AVX) && (TARGET_AVX2))
3295#define HAVE_avx2_maskstoreq256 ((TARGET_AVX) && (TARGET_AVX2))
3296#define HAVE_avx_si256_si (TARGET_AVX && !(MEM_P (operands[0]) && MEM_P (operands[1])))
3297#define HAVE_avx_ps256_ps (TARGET_AVX && !(MEM_P (operands[0]) && MEM_P (operands[1])))
3298#define HAVE_avx_pd256_pd (TARGET_AVX && !(MEM_P (operands[0]) && MEM_P (operands[1])))
3299#define HAVE_avx2_ashrvv4si (TARGET_AVX2 && 1)
3300#define HAVE_avx2_ashrvv4si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)))
3301#define HAVE_avx2_ashrvv8si (TARGET_AVX2 && 1)
3302#define HAVE_avx2_ashrvv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
3303#define HAVE_avx512f_ashrvv16si ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
3304#define HAVE_avx512f_ashrvv16si_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
3305#define HAVE_avx2_ashrvv2di ((TARGET_AVX2 && 1) && (TARGET_AVX512VL))
3306#define HAVE_avx2_ashrvv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
3307#define HAVE_avx2_ashrvv4di ((TARGET_AVX2 && 1) && (TARGET_AVX512VL))
3308#define HAVE_avx2_ashrvv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
3309#define HAVE_avx512f_ashrvv8di ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
3310#define HAVE_avx512f_ashrvv8di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
3311#define HAVE_avx512vl_ashrvv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3312#define HAVE_avx512vl_ashrvv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3313#define HAVE_avx512vl_ashrvv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3314#define HAVE_avx512vl_ashrvv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3315#define HAVE_avx512bw_ashrvv32hi (TARGET_AVX512BW)
3316#define HAVE_avx512bw_ashrvv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
3317#define HAVE_avx512f_ashlvv16si ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
3318#define HAVE_avx512f_ashlvv16si_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
3319#define HAVE_avx512f_lshrvv16si ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
3320#define HAVE_avx512f_lshrvv16si_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
3321#define HAVE_avx2_ashlvv8si (TARGET_AVX2 && 1)
3322#define HAVE_avx2_ashlvv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
3323#define HAVE_avx2_lshrvv8si (TARGET_AVX2 && 1)
3324#define HAVE_avx2_lshrvv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
3325#define HAVE_avx2_ashlvv4si (TARGET_AVX2 && 1)
3326#define HAVE_avx2_ashlvv4si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)))
3327#define HAVE_avx2_lshrvv4si (TARGET_AVX2 && 1)
3328#define HAVE_avx2_lshrvv4si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)))
3329#define HAVE_avx512f_ashlvv8di ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
3330#define HAVE_avx512f_ashlvv8di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
3331#define HAVE_avx512f_lshrvv8di ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
3332#define HAVE_avx512f_lshrvv8di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
3333#define HAVE_avx2_ashlvv4di (TARGET_AVX2 && 1)
3334#define HAVE_avx2_ashlvv4di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
3335#define HAVE_avx2_lshrvv4di (TARGET_AVX2 && 1)
3336#define HAVE_avx2_lshrvv4di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
3337#define HAVE_avx2_ashlvv2di (TARGET_AVX2 && 1)
3338#define HAVE_avx2_ashlvv2di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)))
3339#define HAVE_avx2_lshrvv2di (TARGET_AVX2 && 1)
3340#define HAVE_avx2_lshrvv2di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)))
3341#define HAVE_avx512vl_ashlvv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3342#define HAVE_avx512vl_ashlvv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3343#define HAVE_avx512vl_lshrvv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3344#define HAVE_avx512vl_lshrvv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3345#define HAVE_avx512vl_ashlvv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3346#define HAVE_avx512vl_ashlvv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3347#define HAVE_avx512vl_lshrvv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
3348#define HAVE_avx512vl_lshrvv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3349#define HAVE_avx512bw_ashlvv32hi (TARGET_AVX512BW)
3350#define HAVE_avx512bw_ashlvv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
3351#define HAVE_avx512bw_lshrvv32hi (TARGET_AVX512BW)
3352#define HAVE_avx512bw_lshrvv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
3353#define HAVE_avx_vec_concatv32qi (TARGET_AVX)
3354#define HAVE_avx_vec_concatv16hi (TARGET_AVX)
3355#define HAVE_avx_vec_concatv8si (TARGET_AVX)
3356#define HAVE_avx_vec_concatv4di (TARGET_AVX)
3357#define HAVE_avx_vec_concatv8sf (TARGET_AVX)
3358#define HAVE_avx_vec_concatv4df (TARGET_AVX)
3359#define HAVE_avx_vec_concatv64qi ((TARGET_AVX) && (TARGET_AVX512F))
3360#define HAVE_avx_vec_concatv32hi ((TARGET_AVX) && (TARGET_AVX512F))
3361#define HAVE_avx_vec_concatv16si ((TARGET_AVX) && (TARGET_AVX512F))
3362#define HAVE_avx_vec_concatv8di ((TARGET_AVX) && (TARGET_AVX512F))
3363#define HAVE_avx_vec_concatv16sf ((TARGET_AVX) && (TARGET_AVX512F))
3364#define HAVE_avx_vec_concatv8df ((TARGET_AVX) && (TARGET_AVX512F))
3365#define HAVE_vcvtph2ps (TARGET_F16C || TARGET_AVX512VL)
3366#define HAVE_vcvtph2ps_mask ((TARGET_AVX512F) && (TARGET_F16C || TARGET_AVX512VL))
3367#define HAVE_vcvtph2ps256 (TARGET_F16C || TARGET_AVX512VL)
3368#define HAVE_vcvtph2ps256_mask ((TARGET_AVX512F) && (TARGET_F16C || TARGET_AVX512VL))
3369#define HAVE_avx512f_vcvtph2ps512_mask (TARGET_AVX512F)
3370#define HAVE_avx512f_vcvtph2ps512_mask_round (TARGET_AVX512F)
3371#define HAVE_vcvtps2ph256 (TARGET_F16C || TARGET_AVX512VL)
3372#define HAVE_vcvtps2ph256_mask ((TARGET_AVX512F) && (TARGET_F16C || TARGET_AVX512VL))
3373#define HAVE_avx512f_vcvtps2ph512_mask (TARGET_AVX512F)
3374#define HAVE_avx512f_compressv16si_mask (TARGET_AVX512F)
3375#define HAVE_avx512f_compressv16sf_mask (TARGET_AVX512F)
3376#define HAVE_avx512f_compressv8di_mask (TARGET_AVX512F)
3377#define HAVE_avx512f_compressv8df_mask (TARGET_AVX512F)
3378#define HAVE_avx512vl_compressv8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3379#define HAVE_avx512vl_compressv8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3380#define HAVE_avx512vl_compressv4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3381#define HAVE_avx512vl_compressv4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3382#define HAVE_avx512vl_compressv4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3383#define HAVE_avx512vl_compressv4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3384#define HAVE_avx512vl_compressv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3385#define HAVE_avx512vl_compressv2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3386#define HAVE_avx512f_compressstorev16si_mask (TARGET_AVX512F)
3387#define HAVE_avx512f_compressstorev16sf_mask (TARGET_AVX512F)
3388#define HAVE_avx512f_compressstorev8di_mask (TARGET_AVX512F)
3389#define HAVE_avx512f_compressstorev8df_mask (TARGET_AVX512F)
3390#define HAVE_avx512vl_compressstorev8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3391#define HAVE_avx512vl_compressstorev8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3392#define HAVE_avx512vl_compressstorev4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3393#define HAVE_avx512vl_compressstorev4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3394#define HAVE_avx512vl_compressstorev4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3395#define HAVE_avx512vl_compressstorev4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3396#define HAVE_avx512vl_compressstorev2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3397#define HAVE_avx512vl_compressstorev2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3398#define HAVE_avx512f_expandv16si_mask (TARGET_AVX512F)
3399#define HAVE_avx512f_expandv16sf_mask (TARGET_AVX512F)
3400#define HAVE_avx512f_expandv8di_mask (TARGET_AVX512F)
3401#define HAVE_avx512f_expandv8df_mask (TARGET_AVX512F)
3402#define HAVE_avx512vl_expandv8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3403#define HAVE_avx512vl_expandv8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3404#define HAVE_avx512vl_expandv4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3405#define HAVE_avx512vl_expandv4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3406#define HAVE_avx512vl_expandv4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3407#define HAVE_avx512vl_expandv4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3408#define HAVE_avx512vl_expandv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3409#define HAVE_avx512vl_expandv2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
3410#define HAVE_avx512dq_rangepv16sf (TARGET_AVX512DQ && 1)
3411#define HAVE_avx512dq_rangepv16sf_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V16SFmode == V16SFmode \
3412 || V16SFmode == V8DFmode \
3413 || V16SFmode == V8DImode \
3414 || V16SFmode == V16SImode)))
3415#define HAVE_avx512dq_rangepv16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
3416#define HAVE_avx512dq_rangepv16sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V16SFmode == V16SFmode \
3417 || V16SFmode == V8DFmode \
3418 || V16SFmode == V8DImode \
3419 || V16SFmode == V16SImode))))
3420#define HAVE_avx512dq_rangepv8sf ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
3421#define HAVE_avx512dq_rangepv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
3422#define HAVE_avx512dq_rangepv4sf ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
3423#define HAVE_avx512dq_rangepv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
3424#define HAVE_avx512dq_rangepv8df (TARGET_AVX512DQ && 1)
3425#define HAVE_avx512dq_rangepv8df_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
3426 || V8DFmode == V8DFmode \
3427 || V8DFmode == V8DImode \
3428 || V8DFmode == V16SImode)))
3429#define HAVE_avx512dq_rangepv8df_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
3430#define HAVE_avx512dq_rangepv8df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
3431 || V8DFmode == V8DFmode \
3432 || V8DFmode == V8DImode \
3433 || V8DFmode == V16SImode))))
3434#define HAVE_avx512dq_rangepv4df ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
3435#define HAVE_avx512dq_rangepv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
3436#define HAVE_avx512dq_rangepv2df ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
3437#define HAVE_avx512dq_rangepv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
3438#define HAVE_avx512dq_rangesv4sf (TARGET_AVX512DQ)
3439#define HAVE_avx512dq_rangesv4sf_round ((TARGET_AVX512F) && (TARGET_AVX512DQ))
3440#define HAVE_avx512dq_rangesv2df ((TARGET_AVX512DQ) && (TARGET_SSE2))
3441#define HAVE_avx512dq_rangesv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_SSE2)))
3442#define HAVE_avx512dq_fpclassv16sf (TARGET_AVX512DQ)
3443#define HAVE_avx512dq_fpclassv16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
3444#define HAVE_avx512dq_fpclassv8sf ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
3445#define HAVE_avx512dq_fpclassv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
3446#define HAVE_avx512dq_fpclassv4sf ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
3447#define HAVE_avx512dq_fpclassv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
3448#define HAVE_avx512dq_fpclassv8df (TARGET_AVX512DQ)
3449#define HAVE_avx512dq_fpclassv8df_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
3450#define HAVE_avx512dq_fpclassv4df ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
3451#define HAVE_avx512dq_fpclassv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
3452#define HAVE_avx512dq_fpclassv2df ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
3453#define HAVE_avx512dq_fpclassv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
3454#define HAVE_avx512dq_vmfpclassv4sf (TARGET_AVX512DQ)
3455#define HAVE_avx512dq_vmfpclassv2df ((TARGET_AVX512DQ) && (TARGET_SSE2))
3456#define HAVE_avx512f_getmantv16sf (TARGET_AVX512F)
3457#define HAVE_avx512f_getmantv16sf_round (TARGET_AVX512F)
3458#define HAVE_avx512f_getmantv16sf_mask (TARGET_AVX512F)
3459#define HAVE_avx512f_getmantv16sf_mask_round (TARGET_AVX512F)
3460#define HAVE_avx512vl_getmantv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
3461#define HAVE_avx512vl_getmantv8sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3462#define HAVE_avx512vl_getmantv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3463#define HAVE_avx512vl_getmantv8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
3464#define HAVE_avx512vl_getmantv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
3465#define HAVE_avx512vl_getmantv4sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3466#define HAVE_avx512vl_getmantv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3467#define HAVE_avx512vl_getmantv4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
3468#define HAVE_avx512f_getmantv8df (TARGET_AVX512F)
3469#define HAVE_avx512f_getmantv8df_round (TARGET_AVX512F)
3470#define HAVE_avx512f_getmantv8df_mask (TARGET_AVX512F)
3471#define HAVE_avx512f_getmantv8df_mask_round (TARGET_AVX512F)
3472#define HAVE_avx512vl_getmantv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
3473#define HAVE_avx512vl_getmantv4df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3474#define HAVE_avx512vl_getmantv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3475#define HAVE_avx512vl_getmantv4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
3476#define HAVE_avx512vl_getmantv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
3477#define HAVE_avx512vl_getmantv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3478#define HAVE_avx512vl_getmantv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
3479#define HAVE_avx512vl_getmantv2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
3480#define HAVE_avx512f_vgetmantv4sf (TARGET_AVX512F)
3481#define HAVE_avx512f_vgetmantv4sf_round (TARGET_AVX512F)
3482#define HAVE_avx512f_vgetmantv2df ((TARGET_AVX512F) && (TARGET_SSE2))
3483#define HAVE_avx512f_vgetmantv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
3484#define HAVE_avx512bw_dbpsadbwv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3485#define HAVE_avx512bw_dbpsadbwv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
3486#define HAVE_avx512bw_dbpsadbwv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
3487#define HAVE_clzv16si2 (TARGET_AVX512CD)
3488#define HAVE_clzv16si2_mask ((TARGET_AVX512F) && (TARGET_AVX512CD))
3489#define HAVE_clzv8si2 ((TARGET_AVX512CD) && (TARGET_AVX512VL))
3490#define HAVE_clzv8si2_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
3491#define HAVE_clzv4si2 ((TARGET_AVX512CD) && (TARGET_AVX512VL))
3492#define HAVE_clzv4si2_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
3493#define HAVE_clzv8di2 (TARGET_AVX512CD)
3494#define HAVE_clzv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512CD))
3495#define HAVE_clzv4di2 ((TARGET_AVX512CD) && (TARGET_AVX512VL))
3496#define HAVE_clzv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
3497#define HAVE_clzv2di2 ((TARGET_AVX512CD) && (TARGET_AVX512VL))
3498#define HAVE_clzv2di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
3499#define HAVE_conflictv16si_mask ((TARGET_AVX512F) && (TARGET_AVX512CD))
3500#define HAVE_conflictv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
3501#define HAVE_conflictv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
3502#define HAVE_conflictv8di_mask ((TARGET_AVX512F) && (TARGET_AVX512CD))
3503#define HAVE_conflictv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
3504#define HAVE_conflictv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
3505#define HAVE_sha1msg1 (TARGET_SHA)
3506#define HAVE_sha1msg2 (TARGET_SHA)
3507#define HAVE_sha1nexte (TARGET_SHA)
3508#define HAVE_sha1rnds4 (TARGET_SHA)
3509#define HAVE_sha256msg1 (TARGET_SHA)
3510#define HAVE_sha256msg2 (TARGET_SHA)
3511#define HAVE_sha256rnds2 (TARGET_SHA)
3512#define HAVE_avx512f_si512_si (TARGET_AVX512F && !(MEM_P (operands[0]) && MEM_P (operands[1])))
3513#define HAVE_avx512f_ps512_ps (TARGET_AVX512F && !(MEM_P (operands[0]) && MEM_P (operands[1])))
3514#define HAVE_avx512f_pd512_pd (TARGET_AVX512F && !(MEM_P (operands[0]) && MEM_P (operands[1])))
3515#define HAVE_avx512f_si512_256si (TARGET_AVX512F && !(MEM_P (operands[0]) && MEM_P (operands[1])))
3516#define HAVE_avx512f_ps512_256ps (TARGET_AVX512F && !(MEM_P (operands[0]) && MEM_P (operands[1])))
3517#define HAVE_avx512f_pd512_256pd (TARGET_AVX512F && !(MEM_P (operands[0]) && MEM_P (operands[1])))
3518#define HAVE_vpamdd52luqv8di (TARGET_AVX512IFMA)
3519#define HAVE_vpamdd52luqv8di_maskz_1 (TARGET_AVX512IFMA)
3520#define HAVE_vpamdd52huqv8di (TARGET_AVX512IFMA)
3521#define HAVE_vpamdd52huqv8di_maskz_1 (TARGET_AVX512IFMA)
3522#define HAVE_vpamdd52luqv4di ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3523#define HAVE_vpamdd52luqv4di_maskz_1 ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3524#define HAVE_vpamdd52huqv4di ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3525#define HAVE_vpamdd52huqv4di_maskz_1 ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3526#define HAVE_vpamdd52luqv2di ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3527#define HAVE_vpamdd52luqv2di_maskz_1 ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3528#define HAVE_vpamdd52huqv2di ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3529#define HAVE_vpamdd52huqv2di_maskz_1 ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3530#define HAVE_vpamdd52luqv8di_mask (TARGET_AVX512IFMA)
3531#define HAVE_vpamdd52huqv8di_mask (TARGET_AVX512IFMA)
3532#define HAVE_vpamdd52luqv4di_mask ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3533#define HAVE_vpamdd52huqv4di_mask ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3534#define HAVE_vpamdd52luqv2di_mask ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3535#define HAVE_vpamdd52huqv2di_mask ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
3536#define HAVE_vpmultishiftqbv64qi (TARGET_AVX512VBMI)
3537#define HAVE_vpmultishiftqbv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512VBMI))
3538#define HAVE_vpmultishiftqbv16qi ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3539#define HAVE_vpmultishiftqbv16qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512VBMI) && (TARGET_AVX512VL)))
3540#define HAVE_vpmultishiftqbv32qi ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
3541#define HAVE_vpmultishiftqbv32qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512VBMI) && (TARGET_AVX512VL)))
3542#define HAVE_avx5124fmaddps_4fmaddps (TARGET_AVX5124FMAPS)
3543#define HAVE_avx5124fmaddps_4fmaddps_mask (TARGET_AVX5124FMAPS)
3544#define HAVE_avx5124fmaddps_4fmaddps_maskz (TARGET_AVX5124FMAPS)
3545#define HAVE_avx5124fmaddps_4fmaddss (TARGET_AVX5124FMAPS)
3546#define HAVE_avx5124fmaddps_4fmaddss_mask (TARGET_AVX5124FMAPS)
3547#define HAVE_avx5124fmaddps_4fmaddss_maskz (TARGET_AVX5124FMAPS)
3548#define HAVE_avx5124fmaddps_4fnmaddps (TARGET_AVX5124FMAPS)
3549#define HAVE_avx5124fmaddps_4fnmaddps_mask (TARGET_AVX5124FMAPS)
3550#define HAVE_avx5124fmaddps_4fnmaddps_maskz (TARGET_AVX5124FMAPS)
3551#define HAVE_avx5124fmaddps_4fnmaddss (TARGET_AVX5124FMAPS)
3552#define HAVE_avx5124fmaddps_4fnmaddss_mask (TARGET_AVX5124FMAPS)
3553#define HAVE_avx5124fmaddps_4fnmaddss_maskz (TARGET_AVX5124FMAPS)
3554#define HAVE_avx5124vnniw_vp4dpwssd (TARGET_AVX5124VNNIW)
3555#define HAVE_avx5124vnniw_vp4dpwssd_mask (TARGET_AVX5124VNNIW)
3556#define HAVE_avx5124vnniw_vp4dpwssd_maskz (TARGET_AVX5124VNNIW)
3557#define HAVE_avx5124vnniw_vp4dpwssds (TARGET_AVX5124VNNIW)
3558#define HAVE_avx5124vnniw_vp4dpwssds_mask (TARGET_AVX5124VNNIW)
3559#define HAVE_avx5124vnniw_vp4dpwssds_maskz (TARGET_AVX5124VNNIW)
3560#define HAVE_vpopcountv16si (TARGET_AVX512VPOPCNTDQ)
3561#define HAVE_vpopcountv16si_mask ((TARGET_AVX512F) && (TARGET_AVX512VPOPCNTDQ))
3562#define HAVE_vpopcountv8di (TARGET_AVX512VPOPCNTDQ)
3563#define HAVE_vpopcountv8di_mask ((TARGET_AVX512F) && (TARGET_AVX512VPOPCNTDQ))
3564#define HAVE_mfence_sse2 (TARGET_64BIT || TARGET_SSE2)
3565#define HAVE_mfence_nosse (!(TARGET_64BIT || TARGET_SSE2))
3566#define HAVE_atomic_loaddi_fpu (!TARGET_64BIT && (TARGET_80387 || TARGET_SSE))
3567#define HAVE_atomic_storeqi_1 1
3568#define HAVE_atomic_storehi_1 1
3569#define HAVE_atomic_storesi_1 1
3570#define HAVE_atomic_storedi_1 (TARGET_64BIT)
3571#define HAVE_atomic_storedi_fpu (!TARGET_64BIT && (TARGET_80387 || TARGET_SSE))
3572#define HAVE_loaddi_via_fpu (TARGET_80387)
3573#define HAVE_storedi_via_fpu (TARGET_80387)
3574#define HAVE_loaddi_via_sse (TARGET_SSE)
3575#define HAVE_storedi_via_sse (TARGET_SSE)
3576#define HAVE_atomic_compare_and_swapdi_doubleword ((TARGET_CMPXCHG8B) && (!TARGET_64BIT))
3577#define HAVE_atomic_compare_and_swapti_doubleword ((TARGET_CMPXCHG16B) && (TARGET_64BIT))
3578#define HAVE_atomic_compare_and_swapqi_1 (TARGET_CMPXCHG)
3579#define HAVE_atomic_compare_and_swaphi_1 (TARGET_CMPXCHG)
3580#define HAVE_atomic_compare_and_swapsi_1 (TARGET_CMPXCHG)
3581#define HAVE_atomic_compare_and_swapdi_1 ((TARGET_CMPXCHG) && (TARGET_64BIT))
3582#define HAVE_atomic_fetch_addqi (TARGET_XADD)
3583#define HAVE_atomic_fetch_addhi (TARGET_XADD)
3584#define HAVE_atomic_fetch_addsi (TARGET_XADD)
3585#define HAVE_atomic_fetch_adddi ((TARGET_XADD) && (TARGET_64BIT))
3586#define HAVE_atomic_exchangeqi 1
3587#define HAVE_atomic_exchangehi 1
3588#define HAVE_atomic_exchangesi 1
3589#define HAVE_atomic_exchangedi (TARGET_64BIT)
3590#define HAVE_atomic_addqi 1
3591#define HAVE_atomic_addhi 1
3592#define HAVE_atomic_addsi 1
3593#define HAVE_atomic_adddi (TARGET_64BIT)
3594#define HAVE_atomic_subqi 1
3595#define HAVE_atomic_subhi 1
3596#define HAVE_atomic_subsi 1
3597#define HAVE_atomic_subdi (TARGET_64BIT)
3598#define HAVE_atomic_andqi 1
3599#define HAVE_atomic_orqi 1
3600#define HAVE_atomic_xorqi 1
3601#define HAVE_atomic_andhi 1
3602#define HAVE_atomic_orhi 1
3603#define HAVE_atomic_xorhi 1
3604#define HAVE_atomic_andsi 1
3605#define HAVE_atomic_orsi 1
3606#define HAVE_atomic_xorsi 1
3607#define HAVE_atomic_anddi (TARGET_64BIT)
3608#define HAVE_atomic_ordi (TARGET_64BIT)
3609#define HAVE_atomic_xordi (TARGET_64BIT)
3610#define HAVE_atomic_bit_test_and_sethi_1 1
3611#define HAVE_atomic_bit_test_and_setsi_1 1
3612#define HAVE_atomic_bit_test_and_setdi_1 (TARGET_64BIT)
3613#define HAVE_atomic_bit_test_and_complementhi_1 1
3614#define HAVE_atomic_bit_test_and_complementsi_1 1
3615#define HAVE_atomic_bit_test_and_complementdi_1 (TARGET_64BIT)
3616#define HAVE_atomic_bit_test_and_resethi_1 1
3617#define HAVE_atomic_bit_test_and_resetsi_1 1
3618#define HAVE_atomic_bit_test_and_resetdi_1 (TARGET_64BIT)
3619#define HAVE_cbranchqi4 (TARGET_QIMODE_MATH)
3620#define HAVE_cbranchhi4 (TARGET_HIMODE_MATH)
3621#define HAVE_cbranchsi4 1
3622#define HAVE_cbranchdi4 1
3623#define HAVE_cbranchti4 (TARGET_64BIT)
3624#define HAVE_cstoreqi4 (TARGET_QIMODE_MATH)
3625#define HAVE_cstorehi4 (TARGET_HIMODE_MATH)
3626#define HAVE_cstoresi4 1
3627#define HAVE_cstoredi4 (TARGET_64BIT)
3628#define HAVE_cmpsi_1 1
3629#define HAVE_cmpdi_1 (TARGET_64BIT)
3630#define HAVE_cmpqi_ext_3 1
3631#define HAVE_cbranchxf4 (TARGET_80387)
3632#define HAVE_cstorexf4 (TARGET_80387)
3633#define HAVE_cbranchsf4 (TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
3634#define HAVE_cbranchdf4 (TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
3635#define HAVE_cstoresf4 (TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
3636#define HAVE_cstoredf4 (TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
3637#define HAVE_cbranchcc4 1
3638#define HAVE_cstorecc4 1
3639#define HAVE_reload_noff_store (TARGET_64BIT)
3640#define HAVE_reload_noff_load (TARGET_64BIT)
3641#define HAVE_movxi (TARGET_AVX512F)
3642#define HAVE_movoi (TARGET_AVX)
3643#define HAVE_movti (TARGET_64BIT || TARGET_SSE)
3644#define HAVE_movcdi 1
3645#define HAVE_movqi 1
3646#define HAVE_movhi 1
3647#define HAVE_movsi 1
3648#define HAVE_movdi 1
3649#define HAVE_movstrictqi 1
3650#define HAVE_movstricthi 1
3651#define HAVE_extvhi 1
3652#define HAVE_extvsi 1
3653#define HAVE_extzvhi 1
3654#define HAVE_extzvsi 1
3655#define HAVE_extzvdi (TARGET_64BIT)
3656#define HAVE_insvhi 1
3657#define HAVE_insvsi 1
3658#define HAVE_insvdi (TARGET_64BIT)
3659#define HAVE_movtf (TARGET_64BIT || TARGET_SSE)
3660#define HAVE_movsf 1
3661#define HAVE_movdf 1
3662#define HAVE_movxf 1
3663#define HAVE_zero_extendsidi2 1
3664#define HAVE_zero_extendqisi2 1
3665#define HAVE_zero_extendhisi2 1
3666#define HAVE_zero_extendqihi2 1
3667#define HAVE_extendsidi2 1
3668#define HAVE_extendsfdf2 (TARGET_80387 || (TARGET_SSE2 && TARGET_SSE_MATH))
3669#define HAVE_extendsfxf2 (TARGET_80387)
3670#define HAVE_extenddfxf2 (TARGET_80387)
3671#define HAVE_truncdfsf2 (TARGET_80387 || (TARGET_SSE2 && TARGET_SSE_MATH))
3672#define HAVE_truncdfsf2_with_temp 1
3673#define HAVE_truncxfsf2 (TARGET_80387)
3674#define HAVE_truncxfdf2 (TARGET_80387)
3675#define HAVE_fix_truncxfdi2 (TARGET_80387)
3676#define HAVE_fix_truncsfdi2 (TARGET_80387 || (TARGET_64BIT && SSE_FLOAT_MODE_P (SFmode)))
3677#define HAVE_fix_truncdfdi2 (TARGET_80387 || (TARGET_64BIT && SSE_FLOAT_MODE_P (DFmode)))
3678#define HAVE_fix_truncxfsi2 (TARGET_80387)
3679#define HAVE_fix_truncsfsi2 (TARGET_80387 || SSE_FLOAT_MODE_P (SFmode))
/* FP <-> integer conversion expanders (fix_trunc*, fixuns_trunc*, float*,
   floatuns*).  Each HAVE_* macro expands to the insn pattern's enabling
   condition from the machine description.  NOTE(review): this file is
   generated by genflags — do not hand-edit; change the .md source and
   regenerate.  */
#define HAVE_fix_truncdfsi2 (TARGET_80387 || SSE_FLOAT_MODE_P (DFmode))
#define HAVE_fix_truncsfhi2 (TARGET_80387 \
 && !(SSE_FLOAT_MODE_P (SFmode) && (!TARGET_FISTTP || TARGET_SSE_MATH)))
#define HAVE_fix_truncdfhi2 (TARGET_80387 \
 && !(SSE_FLOAT_MODE_P (DFmode) && (!TARGET_FISTTP || TARGET_SSE_MATH)))
#define HAVE_fix_truncxfhi2 (TARGET_80387 \
 && !(SSE_FLOAT_MODE_P (XFmode) && (!TARGET_FISTTP || TARGET_SSE_MATH)))
#define HAVE_fixuns_truncsfsi2 (!TARGET_64BIT && TARGET_SSE2 && TARGET_SSE_MATH)
#define HAVE_fixuns_truncdfsi2 (!TARGET_64BIT && TARGET_SSE2 && TARGET_SSE_MATH)
#define HAVE_fixuns_truncsfhi2 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_fixuns_truncdfhi2 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_floatsisf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_floatdisf2 ((TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)) && (TARGET_64BIT))
#define HAVE_floatsidf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_floatdidf2 ((TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)) && (TARGET_64BIT))
#define HAVE_floatunsqisf2 (!TARGET_64BIT \
 && SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_floatunshisf2 (!TARGET_64BIT \
 && SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_floatunsqidf2 (!TARGET_64BIT \
 && SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_floatunshidf2 (!TARGET_64BIT \
 && SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_floatunssisf2 (!TARGET_64BIT \
 && ((TARGET_80387 && X87_ENABLE_FLOAT (SFmode, DImode) \
 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)))
#define HAVE_floatunssidf2 (!TARGET_64BIT \
 && ((TARGET_80387 && X87_ENABLE_FLOAT (DFmode, DImode) \
 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)))
#define HAVE_floatunssixf2 (!TARGET_64BIT \
 && ((TARGET_80387 && X87_ENABLE_FLOAT (XFmode, DImode) \
 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC) \
 || (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH)))
#define HAVE_floatunsdisf2 (TARGET_64BIT && TARGET_SSE && TARGET_SSE_MATH)
#define HAVE_floatunsdidf2 ((TARGET_64BIT || TARGET_KEEPS_VECTOR_ALIGNED_STACK) \
 && TARGET_SSE2 && TARGET_SSE_MATH)
/* Addition/subtraction expanders: plain integer (QI..TI), signed/unsigned
   overflow-detecting variants (addv*/uaddv*/subv*/usubv*), and FP add/sub
   enabled for either x87 or scalar SSE math.  Generated by genflags.  */
#define HAVE_addqi3 (TARGET_QIMODE_MATH)
#define HAVE_addhi3 (TARGET_HIMODE_MATH)
#define HAVE_addsi3 1
#define HAVE_adddi3 1
#define HAVE_addti3 (TARGET_64BIT)
#define HAVE_addvqi4 1
#define HAVE_addvhi4 1
#define HAVE_addvsi4 1
#define HAVE_addvdi4 (TARGET_64BIT)
#define HAVE_uaddvqi4 1
#define HAVE_uaddvhi4 1
#define HAVE_uaddvsi4 1
#define HAVE_uaddvdi4 (TARGET_64BIT)
#define HAVE_subqi3 (TARGET_QIMODE_MATH)
#define HAVE_subhi3 (TARGET_HIMODE_MATH)
#define HAVE_subsi3 1
#define HAVE_subdi3 1
#define HAVE_subti3 (TARGET_64BIT)
#define HAVE_subvqi4 1
#define HAVE_subvhi4 1
#define HAVE_subvsi4 1
#define HAVE_subvdi4 (TARGET_64BIT)
#define HAVE_usubvqi4 1
#define HAVE_usubvhi4 1
#define HAVE_usubvsi4 1
#define HAVE_usubvdi4 (TARGET_64BIT)
#define HAVE_addqi3_cconly_overflow (!(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_addxf3 (TARGET_80387)
#define HAVE_subxf3 (TARGET_80387)
#define HAVE_addsf3 ((TARGET_80387 && X87_ENABLE_ARITH (SFmode)) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_subsf3 ((TARGET_80387 && X87_ENABLE_ARITH (SFmode)) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_adddf3 ((TARGET_80387 && X87_ENABLE_ARITH (DFmode)) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_subdf3 ((TARGET_80387 && X87_ENABLE_ARITH (DFmode)) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
/* Multiplication and division expanders: plain multiply, overflow-checking
   mulv*/umulv*, widening multiplies (mulsidi3 etc.), high-part multiplies,
   FP multiply/divide, and combined divmod/udivmod patterns.  Generated by
   genflags.  */
#define HAVE_mulhi3 (TARGET_HIMODE_MATH)
#define HAVE_mulsi3 1
#define HAVE_muldi3 (TARGET_64BIT)
#define HAVE_mulqi3 (TARGET_QIMODE_MATH)
#define HAVE_mulvhi4 1
#define HAVE_mulvsi4 1
#define HAVE_mulvdi4 (TARGET_64BIT)
#define HAVE_umulvhi4 1
#define HAVE_umulvsi4 1
#define HAVE_umulvdi4 (TARGET_64BIT)
#define HAVE_mulvqi4 (TARGET_QIMODE_MATH)
#define HAVE_umulvqi4 (TARGET_QIMODE_MATH)
#define HAVE_mulsidi3 (!TARGET_64BIT)
#define HAVE_umulsidi3 (!TARGET_64BIT)
#define HAVE_mulditi3 (TARGET_64BIT)
#define HAVE_umulditi3 (TARGET_64BIT)
#define HAVE_mulqihi3 (TARGET_QIMODE_MATH)
#define HAVE_umulqihi3 (TARGET_QIMODE_MATH)
#define HAVE_smulsi3_highpart 1
#define HAVE_umulsi3_highpart 1
#define HAVE_smuldi3_highpart (TARGET_64BIT)
#define HAVE_umuldi3_highpart (TARGET_64BIT)
#define HAVE_mulxf3 (TARGET_80387)
#define HAVE_mulsf3 ((TARGET_80387 && X87_ENABLE_ARITH (SFmode)) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_muldf3 ((TARGET_80387 && X87_ENABLE_ARITH (DFmode)) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_divxf3 (TARGET_80387)
#define HAVE_divsf3 ((TARGET_80387 && X87_ENABLE_ARITH (SFmode)) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_divdf3 ((TARGET_80387 && X87_ENABLE_ARITH (DFmode)) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_divmodhi4 (TARGET_HIMODE_MATH)
#define HAVE_divmodsi4 1
#define HAVE_divmoddi4 (TARGET_64BIT)
#define HAVE_divmodqi4 (TARGET_QIMODE_MATH)
#define HAVE_udivmodhi4 (TARGET_HIMODE_MATH)
#define HAVE_udivmodsi4 1
#define HAVE_udivmoddi4 (TARGET_64BIT)
#define HAVE_udivmodqi4 (TARGET_QIMODE_MATH)
/* Test, bitwise logic, negation, abs and copysign expanders.  DImode
   and/ior/xor/one_cmpl are also enabled on 32-bit when STV (scalar-to-
   vector) plus SSE2 can handle them.  Generated by genflags.  */
#define HAVE_testsi_ccno_1 1
#define HAVE_testqi_ccz_1 1
#define HAVE_testdi_ccno_1 (TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_testqi_ext_1_ccno 1
#define HAVE_andqi3 (TARGET_QIMODE_MATH)
#define HAVE_andhi3 (TARGET_HIMODE_MATH)
#define HAVE_andsi3 1
#define HAVE_anddi3 ((TARGET_STV && TARGET_SSE2) || TARGET_64BIT)
#define HAVE_iorqi3 (TARGET_QIMODE_MATH)
#define HAVE_xorqi3 (TARGET_QIMODE_MATH)
#define HAVE_iorhi3 (TARGET_HIMODE_MATH)
#define HAVE_xorhi3 (TARGET_HIMODE_MATH)
#define HAVE_iorsi3 1
#define HAVE_xorsi3 1
#define HAVE_iordi3 ((TARGET_STV && TARGET_SSE2) || TARGET_64BIT)
#define HAVE_xordi3 ((TARGET_STV && TARGET_SSE2) || TARGET_64BIT)
#define HAVE_xorqi_ext_1_cc 1
#define HAVE_negqi2 (TARGET_QIMODE_MATH)
#define HAVE_neghi2 (TARGET_HIMODE_MATH)
#define HAVE_negsi2 1
#define HAVE_negdi2 1
#define HAVE_negti2 (TARGET_64BIT)
#define HAVE_negvqi3 1
#define HAVE_negvhi3 1
#define HAVE_negvsi3 1
#define HAVE_negvdi3 (TARGET_64BIT)
#define HAVE_abssf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_negsf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_absdf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_negdf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_absxf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH))
#define HAVE_negxf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH))
#define HAVE_abstf2 (TARGET_SSE)
#define HAVE_negtf2 (TARGET_SSE)
/* NOTE(review): the constant comparisons below ((SFmode == TFmode) etc.)
   come from a mode-iterator instantiation in the .md file; the compiler
   folds them at compile time.  They are intentional generator output,
   not a bug.  */
#define HAVE_copysignsf3 ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || (TARGET_SSE && (SFmode == TFmode)))
#define HAVE_copysigndf3 ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || (TARGET_SSE && (DFmode == TFmode)))
#define HAVE_copysigntf3 ((SSE_FLOAT_MODE_P (TFmode) && TARGET_SSE_MATH) \
 || (TARGET_SSE && (TFmode == TFmode)))
#define HAVE_one_cmplqi2 (TARGET_QIMODE_MATH)
#define HAVE_one_cmplhi2 (TARGET_HIMODE_MATH)
#define HAVE_one_cmplsi2 1
#define HAVE_one_cmpldi2 ((TARGET_STV && TARGET_SSE2) || TARGET_64BIT)
/* Shift and rotate expanders, plus the x86_shift*_adj_* helper patterns
   used when splitting double-word shifts.  Generated by genflags.  */
#define HAVE_ashlqi3 (TARGET_QIMODE_MATH)
#define HAVE_ashlhi3 (TARGET_HIMODE_MATH)
#define HAVE_ashlsi3 1
#define HAVE_ashldi3 1
#define HAVE_ashlti3 (TARGET_64BIT)
#define HAVE_x86_shiftsi_adj_1 (TARGET_CMOVE)
#define HAVE_x86_shiftdi_adj_1 ((TARGET_CMOVE) && (TARGET_64BIT))
#define HAVE_x86_shiftsi_adj_2 1
#define HAVE_x86_shiftdi_adj_2 (TARGET_64BIT)
#define HAVE_lshrqi3 (TARGET_QIMODE_MATH)
#define HAVE_ashrqi3 (TARGET_QIMODE_MATH)
#define HAVE_lshrhi3 (TARGET_HIMODE_MATH)
#define HAVE_ashrhi3 (TARGET_HIMODE_MATH)
#define HAVE_lshrsi3 1
#define HAVE_ashrsi3 1
#define HAVE_lshrdi3 1
#define HAVE_ashrdi3 1
#define HAVE_lshrti3 (TARGET_64BIT)
#define HAVE_ashrti3 (TARGET_64BIT)
#define HAVE_x86_shiftsi_adj_3 1
#define HAVE_x86_shiftdi_adj_3 (TARGET_64BIT)
#define HAVE_rotlti3 (TARGET_64BIT)
#define HAVE_rotrti3 (TARGET_64BIT)
#define HAVE_rotldi3 1
#define HAVE_rotrdi3 1
#define HAVE_rotlqi3 (TARGET_QIMODE_MATH)
#define HAVE_rotrqi3 (TARGET_QIMODE_MATH)
#define HAVE_rotlhi3 (TARGET_HIMODE_MATH)
#define HAVE_rotrhi3 (TARGET_HIMODE_MATH)
#define HAVE_rotlsi3 1
#define HAVE_rotrsi3 1
/* Control flow (jumps, calls, return), function prologue/epilogue,
   bit-scan/byte-swap/parity patterns, and TLS address-computation
   expanders.  Generated by genflags.  */
#define HAVE_indirect_jump 1
#define HAVE_tablejump 1
#define HAVE_call 1
#define HAVE_sibcall 1
#define HAVE_call_pop (!TARGET_64BIT)
#define HAVE_call_value 1
#define HAVE_sibcall_value 1
#define HAVE_call_value_pop (!TARGET_64BIT)
#define HAVE_untyped_call 1
#define HAVE_memory_blockage 1
#define HAVE_return (ix86_can_use_return_insn_p ())
#define HAVE_simple_return (!TARGET_SEH && !ix86_static_chain_on_stack)
#define HAVE_prologue 1
#define HAVE_set_got (!TARGET_64BIT)
#define HAVE_set_got_labelled (!TARGET_64BIT)
#define HAVE_epilogue 1
#define HAVE_sibcall_epilogue 1
#define HAVE_eh_return 1
#define HAVE_split_stack_prologue 1
#define HAVE_split_stack_space_check 1
#define HAVE_ffssi2 1
#define HAVE_ffsdi2 (TARGET_64BIT)
#define HAVE_clzsi2 1
#define HAVE_clzdi2 (TARGET_64BIT)
#define HAVE_bmi2_bzhi_si3 (TARGET_BMI2)
#define HAVE_bmi2_bzhi_di3 ((TARGET_BMI2) && (TARGET_64BIT))
#define HAVE_bswapdi2 (TARGET_64BIT)
#define HAVE_bswapsi2 1
#define HAVE_paritydi2 (! TARGET_POPCNT)
#define HAVE_paritysi2 (! TARGET_POPCNT)
#define HAVE_tls_global_dynamic_32 1
#define HAVE_tls_global_dynamic_64_si ((TARGET_64BIT) && (Pmode == SImode))
#define HAVE_tls_global_dynamic_64_di ((TARGET_64BIT) && (Pmode == DImode))
#define HAVE_tls_local_dynamic_base_32 1
#define HAVE_tls_local_dynamic_base_64_si ((TARGET_64BIT) && (Pmode == SImode))
#define HAVE_tls_local_dynamic_base_64_di ((TARGET_64BIT) && (Pmode == DImode))
#define HAVE_tls_dynamic_gnu2_32 (!TARGET_64BIT && TARGET_GNU2_TLS)
#define HAVE_tls_dynamic_gnu2_64 (TARGET_64BIT && TARGET_GNU2_TLS)
/* x87 "fancy math" expanders: sqrt, fmod/remainder, trigonometric,
   logarithmic and exponential patterns.  The recurring SF/DF condition
   shape is: x87 fancy math enabled, AND either SSE scalar math is not
   handling this mode or mixed SSE/i387 mode is on, AND (for the
   approximating patterns) -funsafe-math-optimizations.  Generated by
   genflags.  */
#define HAVE_rsqrtsf2 (TARGET_SSE && TARGET_SSE_MATH)
#define HAVE_sqrtsf2 ((TARGET_USE_FANCY_MATH_387 && X87_ENABLE_ARITH (SFmode)) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_sqrtdf2 ((TARGET_USE_FANCY_MATH_387 && X87_ENABLE_ARITH (DFmode)) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_fmodxf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_finite_math_only)
#define HAVE_fmodsf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_finite_math_only)
#define HAVE_fmoddf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_finite_math_only)
#define HAVE_remainderxf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_finite_math_only)
#define HAVE_remaindersf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_finite_math_only)
#define HAVE_remainderdf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_finite_math_only)
#define HAVE_sincossf3 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_sincosdf3 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_tanxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_tansf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_tandf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_atan2xf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_atan2sf3 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_atan2df3 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_atanxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_atansf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_atandf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_asinxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_asinsf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_asindf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_acosxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_acossf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_acosdf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_logxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_logsf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_logdf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_log10xf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_log10sf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_log10df2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_log2xf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_log2sf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_log2df2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_log1pxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_log1psf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_log1pdf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_logbxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_logbsf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_logbdf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_ilogbxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_ilogbsf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_ilogbdf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_expNcorexf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_expxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_expsf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_expdf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_exp10xf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_exp10sf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_exp10df2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_exp2xf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_exp2sf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_exp2df2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_expm1xf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_expm1sf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_expm1df2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_ldexpxf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_ldexpsf3 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_ldexpdf3 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_scalbxf3 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_scalbsf3 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_scalbdf3 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_significandxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_significandsf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_significanddf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
/* Rounding and FP classification expanders: rint/round/floor/ceil/btrunc,
   FP->integer rounding conversions (lrint/lround/lfloor/lceil), and
   isinf/signbit.  NOTE(review): always-false/always-true mode comparisons
   (e.g. "HImode != HImode", "DImode != DImode") are mode-iterator
   instantiations from the .md file that fold to constants — intentional
   generator output.  Generated by genflags.  */
#define HAVE_rintsf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387)) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_rintdf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387)) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_roundsf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math)) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_rounddf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math)) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_roundxf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math)) \
 || (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_lrintxfhi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_lrintxfsi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_lrintxfdi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_lrintsfsi2 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_lrintsfdi2 ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) && (TARGET_64BIT))
#define HAVE_lrintdfsi2 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_lrintdfdi2 ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) && (TARGET_64BIT))
#define HAVE_lroundsfhi2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && HImode != HImode \
 && ((HImode != DImode) || TARGET_64BIT) \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_lrounddfhi2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && HImode != HImode \
 && ((HImode != DImode) || TARGET_64BIT) \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_lroundxfhi2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations) \
 || (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH \
 && HImode != HImode \
 && ((HImode != DImode) || TARGET_64BIT) \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_lroundsfsi2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && SImode != HImode \
 && ((SImode != DImode) || TARGET_64BIT) \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_lrounddfsi2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && SImode != HImode \
 && ((SImode != DImode) || TARGET_64BIT) \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_lroundxfsi2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations) \
 || (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH \
 && SImode != HImode \
 && ((SImode != DImode) || TARGET_64BIT) \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_lroundsfdi2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && DImode != HImode \
 && ((DImode != DImode) || TARGET_64BIT) \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_lrounddfdi2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && DImode != HImode \
 && ((DImode != DImode) || TARGET_64BIT) \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_lroundxfdi2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations) \
 || (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH \
 && DImode != HImode \
 && ((DImode != DImode) || TARGET_64BIT) \
 && !flag_trapping_math && !flag_rounding_math))
#define HAVE_floorxf2 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_ceilxf2 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_btruncxf2 (TARGET_USE_FANCY_MATH_387 \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math))
#define HAVE_floorsf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math)) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && (TARGET_ROUND || !flag_trapping_math || flag_fp_int_builtin_inexact)))
#define HAVE_ceilsf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math)) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && (TARGET_ROUND || !flag_trapping_math || flag_fp_int_builtin_inexact)))
#define HAVE_btruncsf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math)) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && (TARGET_ROUND || !flag_trapping_math || flag_fp_int_builtin_inexact)))
#define HAVE_floordf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math)) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && (TARGET_ROUND || !flag_trapping_math || flag_fp_int_builtin_inexact)))
#define HAVE_ceildf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math)) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && (TARGET_ROUND || !flag_trapping_math || flag_fp_int_builtin_inexact)))
#define HAVE_btruncdf2 ((TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && (flag_fp_int_builtin_inexact || !flag_trapping_math)) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && (TARGET_ROUND || !flag_trapping_math || flag_fp_int_builtin_inexact)))
#define HAVE_nearbyintxf2 (TARGET_USE_FANCY_MATH_387 \
 && flag_unsafe_math_optimizations)
#define HAVE_nearbyintsf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_nearbyintdf2 (TARGET_USE_FANCY_MATH_387 \
 && (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
 || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_lfloorxfhi2 (TARGET_USE_FANCY_MATH_387 \
 && (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_lceilxfhi2 (TARGET_USE_FANCY_MATH_387 \
 && (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_lfloorxfsi2 (TARGET_USE_FANCY_MATH_387 \
 && (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_lceilxfsi2 (TARGET_USE_FANCY_MATH_387 \
 && (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_lfloorxfdi2 (TARGET_USE_FANCY_MATH_387 \
 && (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_lceilxfdi2 (TARGET_USE_FANCY_MATH_387 \
 && (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
 && flag_unsafe_math_optimizations)
#define HAVE_lfloorsfsi2 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math)
#define HAVE_lceilsfsi2 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math)
#define HAVE_lfloorsfdi2 ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math) && (TARGET_64BIT))
#define HAVE_lceilsfdi2 ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math) && (TARGET_64BIT))
#define HAVE_lfloordfsi2 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math)
#define HAVE_lceildfsi2 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math)
#define HAVE_lfloordfdi2 ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math) && (TARGET_64BIT))
#define HAVE_lceildfdi2 ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
 && !flag_trapping_math) && (TARGET_64BIT))
#define HAVE_isinfxf2 (TARGET_USE_FANCY_MATH_387 \
 && ix86_libc_has_function (function_c99_misc))
#define HAVE_isinfsf2 (TARGET_USE_FANCY_MATH_387 \
 && ix86_libc_has_function (function_c99_misc) \
 && !(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_isinfdf2 (TARGET_USE_FANCY_MATH_387 \
 && ix86_libc_has_function (function_c99_misc) \
 && !(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_signbittf2 (TARGET_SSE)
#define HAVE_signbitxf2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_signbitdf2 (TARGET_USE_FANCY_MATH_387 \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_signbitsf2 (TARGET_USE_FANCY_MATH_387 \
 && !(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
/* Block/string operations (movmem/setmem/cmpstrn/strlen), conditional
   moves, stack probing/protection, and ISA-extension patterns (LWP, RTM,
   MPX bounds, PKU).  Generated by genflags.  */
#define HAVE_movmemsi 1
#define HAVE_movmemdi (TARGET_64BIT)
#define HAVE_strmov 1
#define HAVE_strmov_singleop 1
#define HAVE_rep_mov 1
#define HAVE_setmemsi 1
#define HAVE_setmemdi (TARGET_64BIT)
#define HAVE_strset 1
#define HAVE_strset_singleop 1
#define HAVE_rep_stos 1
#define HAVE_cmpstrnsi 1
#define HAVE_cmpintqi 1
#define HAVE_cmpstrnqi_nz_1 1
#define HAVE_cmpstrnqi_1 1
#define HAVE_strlensi (Pmode == SImode)
#define HAVE_strlendi (Pmode == DImode)
#define HAVE_strlenqi_1 1
#define HAVE_movqicc (TARGET_QIMODE_MATH)
#define HAVE_movhicc (TARGET_HIMODE_MATH)
#define HAVE_movsicc 1
#define HAVE_movdicc (TARGET_64BIT)
#define HAVE_x86_movsicc_0_m1 1
#define HAVE_x86_movdicc_0_m1 (TARGET_64BIT)
#define HAVE_movsfcc ((TARGET_80387 && TARGET_CMOVE) \
 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_movdfcc ((TARGET_80387 && TARGET_CMOVE) \
 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_movxfcc ((TARGET_80387 && TARGET_CMOVE) \
 || (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH))
#define HAVE_addqicc 1
#define HAVE_addhicc 1
#define HAVE_addsicc 1
#define HAVE_adddicc (TARGET_64BIT)
#define HAVE_allocate_stack (ix86_target_stack_probe ())
#define HAVE_probe_stack 1
#define HAVE_builtin_setjmp_receiver (!TARGET_64BIT && flag_pic)
#define HAVE_prefetch (TARGET_3DNOW || TARGET_PREFETCH_SSE || TARGET_PRFCHW || TARGET_PREFETCHWT1)
#define HAVE_stack_protect_set (TARGET_SSP_TLS_GUARD)
#define HAVE_stack_protect_test (TARGET_SSP_TLS_GUARD)
#define HAVE_lwp_llwpcb (TARGET_LWP)
#define HAVE_lwp_slwpcb (TARGET_LWP)
#define HAVE_lwp_lwpvalsi3 (TARGET_LWP)
#define HAVE_lwp_lwpvaldi3 ((TARGET_LWP) && (TARGET_64BIT))
#define HAVE_lwp_lwpinssi3 (TARGET_LWP)
#define HAVE_lwp_lwpinsdi3 ((TARGET_LWP) && (TARGET_64BIT))
#define HAVE_pause 1
#define HAVE_xbegin (TARGET_RTM)
#define HAVE_xtest (TARGET_RTM)
#define HAVE_bnd32_mk ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd64_mk ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_movbnd32 ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_movbnd64 ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_bnd32_cl ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd32_cu ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd32_cn ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd64_cl ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_bnd64_cu ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_bnd64_cn ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_bnd32_ldx ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd64_ldx ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_bnd32_stx ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd64_stx ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_rdpkru (TARGET_PKU)
#define HAVE_wrpkru (TARGET_PKU)
/* Availability conditions for MMX / 3DNow! vector patterns: 64-bit vector
   moves, arithmetic, saturating arithmetic, multiplies, min/max, compares,
   logicals, element insert/extract/init, averages, maskmovq, and the
   emms/femms state-clearing insns.  NOTE: subterms such as
   "V1DImode == V1DImode" are emitted mechanically by genflags' template
   expansion over mode iterators and are constant-folded by the compiler.  */
#define HAVE_movv8qi (TARGET_MMX)
#define HAVE_movv4hi (TARGET_MMX)
#define HAVE_movv2si (TARGET_MMX)
#define HAVE_movv1di (TARGET_MMX)
#define HAVE_movv2sf (TARGET_MMX)
#define HAVE_movmisalignv8qi (TARGET_MMX)
#define HAVE_movmisalignv4hi (TARGET_MMX)
#define HAVE_movmisalignv2si (TARGET_MMX)
#define HAVE_movmisalignv1di (TARGET_MMX)
#define HAVE_movmisalignv2sf (TARGET_MMX)
#define HAVE_mmx_addv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_subv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_subrv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_mulv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_smaxv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_sminv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_eqv2sf3 (TARGET_3DNOW)
#define HAVE_vec_setv2sf (TARGET_MMX)
#define HAVE_vec_extractv2sf (TARGET_MMX)
#define HAVE_vec_initv2sf (TARGET_SSE)
#define HAVE_mmx_addv8qi3 (TARGET_MMX || (TARGET_SSE2 && V8QImode == V1DImode))
#define HAVE_mmx_subv8qi3 (TARGET_MMX || (TARGET_SSE2 && V8QImode == V1DImode))
#define HAVE_mmx_addv4hi3 (TARGET_MMX || (TARGET_SSE2 && V4HImode == V1DImode))
#define HAVE_mmx_subv4hi3 (TARGET_MMX || (TARGET_SSE2 && V4HImode == V1DImode))
#define HAVE_mmx_addv2si3 (TARGET_MMX || (TARGET_SSE2 && V2SImode == V1DImode))
#define HAVE_mmx_subv2si3 (TARGET_MMX || (TARGET_SSE2 && V2SImode == V1DImode))
#define HAVE_mmx_addv1di3 (TARGET_MMX || (TARGET_SSE2 && V1DImode == V1DImode))
#define HAVE_mmx_subv1di3 (TARGET_MMX || (TARGET_SSE2 && V1DImode == V1DImode))
#define HAVE_mmx_ssaddv8qi3 (TARGET_MMX)
#define HAVE_mmx_usaddv8qi3 (TARGET_MMX)
#define HAVE_mmx_sssubv8qi3 (TARGET_MMX)
#define HAVE_mmx_ussubv8qi3 (TARGET_MMX)
#define HAVE_mmx_ssaddv4hi3 (TARGET_MMX)
#define HAVE_mmx_usaddv4hi3 (TARGET_MMX)
#define HAVE_mmx_sssubv4hi3 (TARGET_MMX)
#define HAVE_mmx_ussubv4hi3 (TARGET_MMX)
#define HAVE_mmx_mulv4hi3 (TARGET_MMX)
#define HAVE_mmx_smulv4hi3_highpart (TARGET_MMX)
#define HAVE_mmx_umulv4hi3_highpart (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_pmaddwd (TARGET_MMX)
#define HAVE_mmx_pmulhrwv4hi3 (TARGET_3DNOW)
#define HAVE_sse2_umulv1siv1di3 (TARGET_SSE2)
#define HAVE_mmx_smaxv4hi3 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_sminv4hi3 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_umaxv8qi3 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_uminv8qi3 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_eqv8qi3 (TARGET_MMX)
#define HAVE_mmx_eqv4hi3 (TARGET_MMX)
#define HAVE_mmx_eqv2si3 (TARGET_MMX)
#define HAVE_mmx_andv8qi3 (TARGET_MMX)
#define HAVE_mmx_iorv8qi3 (TARGET_MMX)
#define HAVE_mmx_xorv8qi3 (TARGET_MMX)
#define HAVE_mmx_andv4hi3 (TARGET_MMX)
#define HAVE_mmx_iorv4hi3 (TARGET_MMX)
#define HAVE_mmx_xorv4hi3 (TARGET_MMX)
#define HAVE_mmx_andv2si3 (TARGET_MMX)
#define HAVE_mmx_iorv2si3 (TARGET_MMX)
#define HAVE_mmx_xorv2si3 (TARGET_MMX)
#define HAVE_mmx_pinsrw (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_pshufw (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_vec_setv2si (TARGET_MMX)
#define HAVE_vec_extractv2si (TARGET_MMX)
#define HAVE_vec_initv2si (TARGET_SSE)
#define HAVE_vec_setv4hi (TARGET_MMX)
#define HAVE_vec_extractv4hi (TARGET_MMX)
#define HAVE_vec_initv4hi (TARGET_SSE)
#define HAVE_vec_setv8qi (TARGET_MMX)
#define HAVE_vec_extractv8qi (TARGET_MMX)
#define HAVE_vec_initv8qi (TARGET_SSE)
#define HAVE_mmx_uavgv8qi3 (TARGET_SSE || TARGET_3DNOW)
#define HAVE_mmx_uavgv4hi3 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_maskmovq (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_emms (TARGET_MMX)
#define HAVE_mmx_femms (TARGET_3DNOW)
/* Availability conditions for SSE/AVX/AVX-512 vector moves (aligned and
   misaligned), non-temporal stores (storent*), and AVX-512 mask-register
   moves (kmov[bwdq]).  128-bit modes need only TARGET_SSE; 256-bit modes
   additionally need AVX and 512-bit modes AVX512F (AVX512BW for the
   4x-TImode variants).  The kmov conditions also require that not both
   operands are memory; operands[] and MEM_P are in scope only where these
   macros are expanded inside the back end.  */
#define HAVE_movv64qi ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv32qi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv16qi (TARGET_SSE)
#define HAVE_movv32hi ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv16hi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv8hi (TARGET_SSE)
#define HAVE_movv16si ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv8si ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv4si (TARGET_SSE)
#define HAVE_movv8di ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv4di ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv2di (TARGET_SSE)
#define HAVE_movv4ti ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_movv2ti ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv1ti (TARGET_SSE)
#define HAVE_movv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv4sf (TARGET_SSE)
#define HAVE_movv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv2df (TARGET_SSE)
#define HAVE_movmisalignv64qi ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv32qi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv16qi (TARGET_SSE)
#define HAVE_movmisalignv32hi ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv16hi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv8hi (TARGET_SSE)
#define HAVE_movmisalignv16si ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv8si ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv4si (TARGET_SSE)
#define HAVE_movmisalignv8di ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv4di ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv2di (TARGET_SSE)
#define HAVE_movmisalignv4ti ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_movmisalignv2ti ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv1ti (TARGET_SSE)
#define HAVE_movmisalignv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv4sf (TARGET_SSE)
#define HAVE_movmisalignv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv2df (TARGET_SSE)
#define HAVE_storentdi ((TARGET_SSE) && (TARGET_SSE2 && TARGET_64BIT))
#define HAVE_storentsi ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_storentsf ((TARGET_SSE) && (TARGET_SSE4A))
#define HAVE_storentdf ((TARGET_SSE) && (TARGET_SSE4A))
#define HAVE_storentv8di ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_storentv4di ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_storentv2di ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_storentv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_storentv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_storentv4sf (TARGET_SSE)
#define HAVE_storentv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_storentv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_storentv2df ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_kmovb ((TARGET_AVX512F \
   && !(MEM_P (operands[0]) && MEM_P (operands[1]))) && (TARGET_AVX512DQ))
#define HAVE_kmovw (TARGET_AVX512F \
   && !(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_kmovd ((TARGET_AVX512F \
   && !(MEM_P (operands[0]) && MEM_P (operands[1]))) && (TARGET_AVX512BW))
#define HAVE_kmovq ((TARGET_AVX512F \
   && !(MEM_P (operands[0]) && MEM_P (operands[1]))) && (TARGET_AVX512BW))
/* Availability conditions for vector FP abs/neg, add/sub, mul, div, sqrt
   and rsqrt, including the AVX-512 "_mask" (write-masked) and "_round"
   (embedded rounding) variants.  NOTE: subterms such as "1 && 1",
   "64 == 64" and "V16SFmode == V16SFmode" are mechanical residue of
   genflags expanding mode/size iterators from the md file; the compiler
   folds them to constants.  Masked forms of sub-512-bit modes additionally
   require AVX512VL (the "NN == 64 || TARGET_AVX512VL" terms); rounding
   forms exist only for 512-bit modes.  */
#define HAVE_absv16sf2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_negv16sf2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_absv8sf2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_negv8sf2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_absv4sf2 (TARGET_SSE)
#define HAVE_negv4sf2 (TARGET_SSE)
#define HAVE_absv8df2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_negv8df2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_absv4df2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_negv4df2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_absv2df2 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_negv2df2 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_addv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_addv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
   || V16SFmode == V8DFmode \
   || V16SFmode == V8DImode \
   || V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_addv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_addv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
   || V16SFmode == V8DFmode \
   || V16SFmode == V8DImode \
   || V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_subv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_subv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
   || V16SFmode == V8DFmode \
   || V16SFmode == V8DImode \
   || V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_subv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_subv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
   || V16SFmode == V8DFmode \
   || V16SFmode == V8DImode \
   || V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_addv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_addv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_subv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_subv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_addv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_addv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_subv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_subv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_addv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_addv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
   || V8DFmode == V8DFmode \
   || V8DFmode == V8DImode \
   || V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_addv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_addv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
   || V8DFmode == V8DFmode \
   || V8DFmode == V8DImode \
   || V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_subv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_subv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
   || V8DFmode == V8DFmode \
   || V8DFmode == V8DImode \
   || V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_subv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_subv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
   || V8DFmode == V8DFmode \
   || V8DFmode == V8DImode \
   || V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_addv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_addv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_subv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_subv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_addv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_addv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_subv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_subv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_mulv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_mulv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
   || V16SFmode == V8DFmode \
   || V16SFmode == V8DImode \
   || V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_mulv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_mulv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
   || V16SFmode == V8DFmode \
   || V16SFmode == V8DImode \
   || V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_mulv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_mulv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_mulv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_mulv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_mulv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_mulv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
   || V8DFmode == V8DFmode \
   || V8DFmode == V8DImode \
   || V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_mulv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_mulv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
   || V8DFmode == V8DFmode \
   || V8DFmode == V8DImode \
   || V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_mulv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_mulv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_mulv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_mulv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_divv8df3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_divv4df3 ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_divv2df3 (TARGET_SSE2)
#define HAVE_divv16sf3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_divv8sf3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_divv4sf3 (TARGET_SSE)
#define HAVE_sqrtv8df2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_sqrtv4df2 ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_sqrtv2df2 (TARGET_SSE2)
#define HAVE_sqrtv16sf2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_sqrtv8sf2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sqrtv4sf2 (TARGET_SSE)
#define HAVE_rsqrtv8sf2 ((TARGET_SSE_MATH) && (TARGET_AVX))
#define HAVE_rsqrtv4sf2 (TARGET_SSE_MATH)
#define HAVE_rsqrtv16sf2 (TARGET_SSE_MATH && TARGET_AVX512ER)
/* Availability conditions for vector FP signed max/min (smax/smin) with
   AVX-512 "_mask" and "_round" variants; same structure and constant-folded
   iterator residue as the add/sub/mul group above (per genflags template
   expansion).  */
#define HAVE_smaxv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_smaxv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
   || V16SFmode == V8DFmode \
   || V16SFmode == V8DImode \
   || V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_smaxv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_smaxv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
   || V16SFmode == V8DFmode \
   || V16SFmode == V8DImode \
   || V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_sminv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_sminv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
   || V16SFmode == V8DFmode \
   || V16SFmode == V8DImode \
   || V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_sminv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_sminv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
   || V16SFmode == V8DFmode \
   || V16SFmode == V8DImode \
   || V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_smaxv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_smaxv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sminv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_sminv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_smaxv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_smaxv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_sminv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_sminv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_smaxv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_smaxv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
   || V8DFmode == V8DFmode \
   || V8DFmode == V8DImode \
   || V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_smaxv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_smaxv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
   || V8DFmode == V8DFmode \
   || V8DFmode == V8DImode \
   || V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_sminv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_sminv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
   || V8DFmode == V8DFmode \
   || V8DFmode == V8DImode \
   || V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_sminv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_sminv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
   || V8DFmode == V8DFmode \
   || V8DFmode == V8DImode \
   || V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_smaxv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_smaxv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sminv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_sminv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_smaxv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_smaxv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_sminv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_sminv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
/* Availability conditions for horizontal-add and vector reduction
   expanders (reduc_plus/smax/smin/umax/umin to scalar) across SSE3, SSE4.1,
   AVX, AVX2, AVX512F and AVX512BW vector widths.  */
#define HAVE_sse3_haddv2df3 (TARGET_SSE3)
#define HAVE_reduc_plus_scal_v8df (TARGET_AVX512F)
#define HAVE_reduc_plus_scal_v4df (TARGET_AVX)
#define HAVE_reduc_plus_scal_v2df (TARGET_SSE3)
#define HAVE_reduc_plus_scal_v16sf (TARGET_AVX512F)
#define HAVE_reduc_plus_scal_v8sf (TARGET_AVX)
#define HAVE_reduc_plus_scal_v4sf (TARGET_SSE)
#define HAVE_reduc_smax_scal_v32qi (TARGET_AVX2)
#define HAVE_reduc_smin_scal_v32qi (TARGET_AVX2)
#define HAVE_reduc_smax_scal_v16hi (TARGET_AVX2)
#define HAVE_reduc_smin_scal_v16hi (TARGET_AVX2)
#define HAVE_reduc_smax_scal_v8si (TARGET_AVX2)
#define HAVE_reduc_smin_scal_v8si (TARGET_AVX2)
#define HAVE_reduc_smax_scal_v4di (TARGET_AVX2)
#define HAVE_reduc_smin_scal_v4di (TARGET_AVX2)
#define HAVE_reduc_smax_scal_v8sf (TARGET_AVX)
#define HAVE_reduc_smin_scal_v8sf (TARGET_AVX)
#define HAVE_reduc_smax_scal_v4df (TARGET_AVX)
#define HAVE_reduc_smin_scal_v4df (TARGET_AVX)
#define HAVE_reduc_smax_scal_v4sf (TARGET_SSE)
#define HAVE_reduc_smin_scal_v4sf (TARGET_SSE)
#define HAVE_reduc_smax_scal_v64qi (TARGET_AVX512BW)
#define HAVE_reduc_smin_scal_v64qi (TARGET_AVX512BW)
#define HAVE_reduc_smax_scal_v32hi (TARGET_AVX512BW)
#define HAVE_reduc_smin_scal_v32hi (TARGET_AVX512BW)
#define HAVE_reduc_smax_scal_v16si (TARGET_AVX512F)
#define HAVE_reduc_smin_scal_v16si (TARGET_AVX512F)
#define HAVE_reduc_smax_scal_v8di (TARGET_AVX512F)
#define HAVE_reduc_smin_scal_v8di (TARGET_AVX512F)
#define HAVE_reduc_smax_scal_v16sf (TARGET_AVX512F)
#define HAVE_reduc_smin_scal_v16sf (TARGET_AVX512F)
#define HAVE_reduc_smax_scal_v8df (TARGET_AVX512F)
#define HAVE_reduc_smin_scal_v8df (TARGET_AVX512F)
#define HAVE_reduc_umax_scal_v16si (TARGET_AVX512F)
#define HAVE_reduc_umin_scal_v16si (TARGET_AVX512F)
#define HAVE_reduc_umax_scal_v8di (TARGET_AVX512F)
#define HAVE_reduc_umin_scal_v8di (TARGET_AVX512F)
#define HAVE_reduc_umax_scal_v32hi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_reduc_umin_scal_v32hi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_reduc_umax_scal_v64qi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_reduc_umin_scal_v64qi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_reduc_umax_scal_v32qi (TARGET_AVX2)
#define HAVE_reduc_umin_scal_v32qi (TARGET_AVX2)
#define HAVE_reduc_umax_scal_v16hi (TARGET_AVX2)
#define HAVE_reduc_umin_scal_v16hi (TARGET_AVX2)
#define HAVE_reduc_umax_scal_v8si (TARGET_AVX2)
#define HAVE_reduc_umin_scal_v8si (TARGET_AVX2)
#define HAVE_reduc_umax_scal_v4di (TARGET_AVX2)
#define HAVE_reduc_umin_scal_v4di (TARGET_AVX2)
#define HAVE_reduc_umin_scal_v8hi (TARGET_SSE4_1)
/* Availability conditions for vector comparison expanders (vec_cmp =
   signed/FP, vec_cmpu = unsigned).  Names encode operand mode then result
   mode; mask-result forms (…hi/…qi/…di suffixes) require AVX-512 (plus
   AVX512VL for sub-512-bit operands), vector-result forms fall back to
   SSE2/SSE4.2/AVX/AVX2.  */
#define HAVE_vec_cmpv16sihi (TARGET_AVX512F)
#define HAVE_vec_cmpv8siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv4siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv8diqi (TARGET_AVX512F)
#define HAVE_vec_cmpv4diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv2diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv16sfhi (TARGET_AVX512F)
#define HAVE_vec_cmpv8sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv4sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv8dfqi (TARGET_AVX512F)
#define HAVE_vec_cmpv4dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv2dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv64qidi (TARGET_AVX512BW)
#define HAVE_vec_cmpv16qihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv32qisi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv32hisi (TARGET_AVX512BW)
#define HAVE_vec_cmpv16hihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv8hiqi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv32qiv32qi (TARGET_AVX2)
#define HAVE_vec_cmpv16hiv16hi (TARGET_AVX2)
#define HAVE_vec_cmpv8siv8si (TARGET_AVX2)
#define HAVE_vec_cmpv4div4di (TARGET_AVX2)
#define HAVE_vec_cmpv16qiv16qi (TARGET_SSE2)
#define HAVE_vec_cmpv8hiv8hi (TARGET_SSE2)
#define HAVE_vec_cmpv4siv4si (TARGET_SSE2)
#define HAVE_vec_cmpv2div2di (TARGET_SSE4_2)
#define HAVE_vec_cmpv8sfv8si (TARGET_AVX)
#define HAVE_vec_cmpv4dfv4di (TARGET_AVX)
#define HAVE_vec_cmpv4sfv4si (TARGET_SSE)
#define HAVE_vec_cmpv2dfv2di ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_vec_cmpuv16sihi (TARGET_AVX512F)
#define HAVE_vec_cmpuv8siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv4siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv8diqi (TARGET_AVX512F)
#define HAVE_vec_cmpuv4diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv2diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv64qidi (TARGET_AVX512BW)
#define HAVE_vec_cmpuv16qihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv32qisi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv32hisi (TARGET_AVX512BW)
#define HAVE_vec_cmpuv16hihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv8hiqi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv32qiv32qi (TARGET_AVX2)
#define HAVE_vec_cmpuv16hiv16hi (TARGET_AVX2)
#define HAVE_vec_cmpuv8siv8si (TARGET_AVX2)
#define HAVE_vec_cmpuv4div4di (TARGET_AVX2)
#define HAVE_vec_cmpuv16qiv16qi (TARGET_SSE2)
#define HAVE_vec_cmpuv8hiv8hi (TARGET_SSE2)
#define HAVE_vec_cmpuv4siv4si (TARGET_SSE2)
#define HAVE_vec_cmpuv2div2di (TARGET_SSE4_2)
#define HAVE_vec_cmpeqv2div2di (TARGET_SSE4_1)
/* Availability conditions for vcond (vector conditional select) expanders.
   Names encode data mode then comparison mode; each condition requires the
   ISA level of the widest mode involved plus equal element counts between
   the two modes (the GET_MODE_NUNITS equality, constant-folded at compile
   time since both arguments are fixed modes).  */
#define HAVE_vcondv64qiv16sf (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V64QImode) \
       == GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv32hiv16sf (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V32HImode) \
       == GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv16siv16sf (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V16SImode) \
       == GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv8div16sf (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V8DImode) \
       == GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv16sfv16sf (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V16SFmode) \
       == GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv8dfv16sf (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V8DFmode) \
       == GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv64qiv8df (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V64QImode) \
       == GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv32hiv8df (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V32HImode) \
       == GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv16siv8df (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V16SImode) \
       == GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv8div8df (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V8DImode) \
       == GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv16sfv8df (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V16SFmode) \
       == GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv8dfv8df (TARGET_AVX512F \
   && (GET_MODE_NUNITS (V8DFmode) \
       == GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv32qiv8sf (TARGET_AVX \
   && (GET_MODE_NUNITS (V32QImode) \
       == GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv32qiv4df (TARGET_AVX \
   && (GET_MODE_NUNITS (V32QImode) \
       == GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv16hiv8sf (TARGET_AVX \
   && (GET_MODE_NUNITS (V16HImode) \
       == GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv16hiv4df (TARGET_AVX \
   && (GET_MODE_NUNITS (V16HImode) \
       == GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv8siv8sf (TARGET_AVX \
   && (GET_MODE_NUNITS (V8SImode) \
       == GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv8siv4df (TARGET_AVX \
   && (GET_MODE_NUNITS (V8SImode) \
       == GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv4div8sf (TARGET_AVX \
   && (GET_MODE_NUNITS (V4DImode) \
       == GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv4div4df (TARGET_AVX \
   && (GET_MODE_NUNITS (V4DImode) \
       == GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv8sfv8sf (TARGET_AVX \
   && (GET_MODE_NUNITS (V8SFmode) \
       == GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv8sfv4df (TARGET_AVX \
   && (GET_MODE_NUNITS (V8SFmode) \
       == GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv4dfv8sf (TARGET_AVX \
   && (GET_MODE_NUNITS (V4DFmode) \
       == GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv4dfv4df (TARGET_AVX \
   && (GET_MODE_NUNITS (V4DFmode) \
       == GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv16qiv4sf (TARGET_SSE \
   && (GET_MODE_NUNITS (V16QImode) \
       == GET_MODE_NUNITS (V4SFmode)))
#define HAVE_vcondv16qiv2df ((TARGET_SSE \
   && (GET_MODE_NUNITS (V16QImode) \
       == GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
#define HAVE_vcondv8hiv4sf (TARGET_SSE \
   && (GET_MODE_NUNITS (V8HImode) \
       == GET_MODE_NUNITS (V4SFmode)))
#define HAVE_vcondv8hiv2df ((TARGET_SSE \
   && (GET_MODE_NUNITS (V8HImode) \
       == GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
#define HAVE_vcondv4siv4sf (TARGET_SSE \
   && (GET_MODE_NUNITS (V4SImode) \
       == GET_MODE_NUNITS (V4SFmode)))
#define HAVE_vcondv4siv2df ((TARGET_SSE \
   && (GET_MODE_NUNITS (V4SImode) \
       == GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
#define HAVE_vcondv2div4sf (TARGET_SSE \
   && (GET_MODE_NUNITS (V2DImode) \
       == GET_MODE_NUNITS (V4SFmode)))
#define HAVE_vcondv2div2df ((TARGET_SSE \
   && (GET_MODE_NUNITS (V2DImode) \
       == GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
#define HAVE_vcondv4sfv4sf (TARGET_SSE \
   && (GET_MODE_NUNITS (V4SFmode) \
       == GET_MODE_NUNITS (V4SFmode)))
#define HAVE_vcondv4sfv2df ((TARGET_SSE \
   && (GET_MODE_NUNITS (V4SFmode) \
       == GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
#define HAVE_vcondv2dfv4sf ((TARGET_SSE \
   && (GET_MODE_NUNITS (V2DFmode) \
       == GET_MODE_NUNITS (V4SFmode))) && (TARGET_SSE2))
#define HAVE_vcondv2dfv2df ((TARGET_SSE \
   && (GET_MODE_NUNITS (V2DFmode) \
       == GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
4900#define HAVE_vcond_mask_v16sihi (TARGET_AVX512F)
4901#define HAVE_vcond_mask_v8siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
4902#define HAVE_vcond_mask_v4siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
4903#define HAVE_vcond_mask_v8diqi (TARGET_AVX512F)
4904#define HAVE_vcond_mask_v4diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
4905#define HAVE_vcond_mask_v2diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
4906#define HAVE_vcond_mask_v16sfhi (TARGET_AVX512F)
4907#define HAVE_vcond_mask_v8sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
4908#define HAVE_vcond_mask_v4sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
4909#define HAVE_vcond_mask_v8dfqi (TARGET_AVX512F)
4910#define HAVE_vcond_mask_v4dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
4911#define HAVE_vcond_mask_v2dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
4912#define HAVE_vcond_mask_v64qidi (TARGET_AVX512BW)
4913#define HAVE_vcond_mask_v16qihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
4914#define HAVE_vcond_mask_v32qisi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
4915#define HAVE_vcond_mask_v32hisi (TARGET_AVX512BW)
4916#define HAVE_vcond_mask_v16hihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
4917#define HAVE_vcond_mask_v8hiqi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
4918#define HAVE_vcond_mask_v32qiv32qi (TARGET_AVX2)
4919#define HAVE_vcond_mask_v16hiv16hi (TARGET_AVX2)
4920#define HAVE_vcond_mask_v8siv8si (TARGET_AVX2)
4921#define HAVE_vcond_mask_v4div4di (TARGET_AVX2)
4922#define HAVE_vcond_mask_v16qiv16qi (TARGET_SSE2)
4923#define HAVE_vcond_mask_v8hiv8hi (TARGET_SSE2)
4924#define HAVE_vcond_mask_v4siv4si (TARGET_SSE2)
4925#define HAVE_vcond_mask_v2div2di (TARGET_SSE4_2)
4926#define HAVE_vcond_mask_v8sfv8si (TARGET_AVX)
4927#define HAVE_vcond_mask_v4dfv4di (TARGET_AVX)
4928#define HAVE_vcond_mask_v4sfv4si (TARGET_SSE)
4929#define HAVE_vcond_mask_v2dfv2di ((TARGET_SSE) && (TARGET_SSE2))
/* Bitwise logic (and/ior/xor) on float vectors, plus masked (_mask)
   AVX-512 variants, copysign expanders, and TFmode logical ops.
   The literal "&& 1" terms are mechanical expansions of always-true
   iterator conditions in the .md source (generated file — do not edit).  */
#define HAVE_andv8sf3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_andv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_iorv8sf3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_iorv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_xorv8sf3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_xorv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_andv4sf3 (TARGET_SSE && 1)
#define HAVE_andv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && TARGET_AVX512VL))
#define HAVE_iorv4sf3 (TARGET_SSE && 1)
#define HAVE_iorv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && TARGET_AVX512VL))
#define HAVE_xorv4sf3 (TARGET_SSE && 1)
#define HAVE_xorv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && TARGET_AVX512VL))
#define HAVE_andv4df3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_andv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_iorv4df3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_iorv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_xorv4df3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_xorv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_andv2df3 ((TARGET_SSE && 1) && (TARGET_SSE2))
#define HAVE_andv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_SSE2)))
#define HAVE_iorv2df3 ((TARGET_SSE && 1) && (TARGET_SSE2))
#define HAVE_iorv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_SSE2)))
#define HAVE_xorv2df3 ((TARGET_SSE && 1) && (TARGET_SSE2))
#define HAVE_xorv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_SSE2)))
#define HAVE_andv16sf3 (TARGET_AVX512F)
#define HAVE_andv16sf3_mask (TARGET_AVX512F)
#define HAVE_iorv16sf3 (TARGET_AVX512F)
#define HAVE_iorv16sf3_mask (TARGET_AVX512F)
#define HAVE_xorv16sf3 (TARGET_AVX512F)
#define HAVE_xorv16sf3_mask (TARGET_AVX512F)
#define HAVE_andv8df3 (TARGET_AVX512F)
#define HAVE_andv8df3_mask (TARGET_AVX512F)
#define HAVE_iorv8df3 (TARGET_AVX512F)
#define HAVE_iorv8df3_mask (TARGET_AVX512F)
#define HAVE_xorv8df3 (TARGET_AVX512F)
#define HAVE_xorv8df3_mask (TARGET_AVX512F)
/* copysign expanders per float vector mode.  */
#define HAVE_copysignv16sf3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_copysignv8sf3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_copysignv4sf3 (TARGET_SSE)
#define HAVE_copysignv8df3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_copysignv4df3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_copysignv2df3 ((TARGET_SSE) && (TARGET_SSE2))
/* TFmode (128-bit) bitwise logic.  */
#define HAVE_andtf3 (TARGET_SSE)
#define HAVE_iortf3 (TARGET_SSE)
#define HAVE_xortf3 (TARGET_SSE)
/* Fused multiply-add family: fma/fms/fnma/fnms per mode, the fma4i_*
   internal forms, AVX-512 zero-masking (_maskz) and rounding (_round)
   variants, fmaddsub, and the scalar vm* merge forms (generated file —
   do not edit).  */
#define HAVE_fmasf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fmadf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fmav4sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmav2df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmav8sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmav4df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmav16sf4 (TARGET_AVX512F)
#define HAVE_fmav8df4 (TARGET_AVX512F)
#define HAVE_fmssf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fmsdf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fmsv4sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmsv2df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmsv8sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmsv4df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmsv16sf4 (TARGET_AVX512F)
#define HAVE_fmsv8df4 (TARGET_AVX512F)
#define HAVE_fnmasf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fnmadf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fnmav4sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmav2df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmav8sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmav4df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmav16sf4 (TARGET_AVX512F)
#define HAVE_fnmav8df4 (TARGET_AVX512F)
#define HAVE_fnmssf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fnmsdf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fnmsv4sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmsv2df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmsv8sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmsv4df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmsv16sf4 (TARGET_AVX512F)
#define HAVE_fnmsv8df4 (TARGET_AVX512F)
#define HAVE_fma4i_fmadd_sf (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F)
#define HAVE_fma4i_fmadd_df (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F)
#define HAVE_fma4i_fmadd_v4sf (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fma4i_fmadd_v2df (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fma4i_fmadd_v8sf (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fma4i_fmadd_v4df (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fma4i_fmadd_v16sf (TARGET_AVX512F)
#define HAVE_fma4i_fmadd_v8df (TARGET_AVX512F)
#define HAVE_avx512f_fmadd_v16sf_maskz (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmadd_v16sf_maskz_round ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512vl_fmadd_v8sf_maskz ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v8sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmadd_v4sf_maskz ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v4sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmadd_v8df_maskz (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmadd_v8df_maskz_round ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512vl_fmadd_v4df_maskz ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v4df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmadd_v2df_maskz ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v2df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_fmaddsub_v16sf ((TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F) && (TARGET_AVX512F))
#define HAVE_fmaddsub_v8sf ((TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_fmaddsub_v4sf (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F)
#define HAVE_fmaddsub_v8df ((TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F) && (TARGET_AVX512F))
#define HAVE_fmaddsub_v4df ((TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_fmaddsub_v2df ((TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_fmaddsub_v16sf_maskz (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v16sf_maskz_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v8sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v8sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v4sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmaddsub_v8df_maskz (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v8df_maskz_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v4df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v2df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v2df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
/* Scalar merge FMA forms (operate on lowest element only).  */
#define HAVE_fmai_vmfmadd_v4sf (TARGET_FMA)
#define HAVE_fmai_vmfmadd_v4sf_round ((TARGET_AVX512F) && (TARGET_FMA))
#define HAVE_fmai_vmfmadd_v2df ((TARGET_FMA) && (TARGET_SSE2))
#define HAVE_fmai_vmfmadd_v2df_round ((TARGET_AVX512F) && ((TARGET_FMA) && (TARGET_SSE2)))
#define HAVE_fma4i_vmfmadd_v4sf (TARGET_FMA4)
#define HAVE_fma4i_vmfmadd_v2df ((TARGET_FMA4) && (TARGET_SSE2))
/* Conversions and width-changing patterns: unsigned int<->float
   conversions, pd<->dq/ps conversions, mask-register-to-vector
   (cvtmask2*), unpack-to-float expanders, and vector pack/truncate
   (generated file — do not edit).  Mode-equality terms such as
   "V16SFmode == V4SFmode" are literal expansions of .md iterator
   conditions and fold to constants at compile time.  */
#define HAVE_floatunsv16siv16sf2 ((TARGET_SSE2 && (V16SFmode == V4SFmode || TARGET_AVX2)) && (TARGET_AVX512F))
#define HAVE_floatunsv8siv8sf2 ((TARGET_SSE2 && (V8SFmode == V4SFmode || TARGET_AVX2)) && (TARGET_AVX))
#define HAVE_floatunsv4siv4sf2 (TARGET_SSE2 && (V4SFmode == V4SFmode || TARGET_AVX2))
#define HAVE_fixuns_truncv16sfv16si2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_fixuns_truncv8sfv8si2 ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_fixuns_truncv4sfv4si2 (TARGET_SSE2)
#define HAVE_avx_cvtpd2dq256_2 (TARGET_AVX)
#define HAVE_avx_cvttpd2dq256_2 (TARGET_AVX)
#define HAVE_sse2_cvtpd2ps (TARGET_SSE2)
#define HAVE_sse2_cvtpd2ps_mask (TARGET_SSE2)
/* cvtmask2*: broadcast a k-mask register into a vector of 0/-1 lanes.  */
#define HAVE_avx512bw_cvtmask2bv64qi (TARGET_AVX512BW)
#define HAVE_avx512vl_cvtmask2bv16qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtmask2bv32qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_cvtmask2wv32hi (TARGET_AVX512BW)
#define HAVE_avx512vl_cvtmask2wv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtmask2wv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512f_cvtmask2dv16si (TARGET_AVX512DQ)
#define HAVE_avx512vl_cvtmask2dv8si ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtmask2dv4si ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512f_cvtmask2qv8di (TARGET_AVX512DQ)
#define HAVE_avx512vl_cvtmask2qv4di ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtmask2qv2di ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
/* vec_unpacks/unpacku: widen the high/low half of a vector, optionally
   converting (s = signed, u = unsigned) to float.  */
#define HAVE_vec_unpacks_hi_v4sf (TARGET_SSE2)
#define HAVE_vec_unpacks_hi_v8sf (TARGET_AVX)
#define HAVE_vec_unpacks_hi_v16sf (TARGET_AVX512F)
#define HAVE_vec_unpacks_lo_v4sf (TARGET_SSE2)
#define HAVE_vec_unpacks_lo_v8sf (TARGET_AVX)
#define HAVE_vec_unpacks_float_hi_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_float_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_float_hi_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacks_float_lo_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_float_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_float_lo_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacku_float_hi_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_float_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_float_hi_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacku_float_lo_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_float_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_float_lo_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacks_float_hi_v4si (TARGET_SSE2)
#define HAVE_vec_unpacks_float_lo_v4si (TARGET_SSE2)
#define HAVE_vec_unpacks_float_hi_v8si (TARGET_AVX)
#define HAVE_vec_unpacks_float_lo_v8si (TARGET_AVX)
#define HAVE_vec_unpacks_float_hi_v16si (TARGET_AVX512F)
#define HAVE_vec_unpacks_float_lo_v16si (TARGET_AVX512F)
#define HAVE_vec_unpacku_float_hi_v4si (TARGET_SSE2)
#define HAVE_vec_unpacku_float_lo_v4si (TARGET_SSE2)
#define HAVE_vec_unpacku_float_hi_v8si (TARGET_AVX)
#define HAVE_vec_unpacku_float_hi_v16si (TARGET_AVX512F)
#define HAVE_vec_unpacku_float_lo_v8si (TARGET_AVX)
#define HAVE_vec_unpacku_float_lo_v16si (TARGET_AVX512F)
/* vec_pack: narrow two vectors into one (trunc / signed fix / unsigned fix).  */
#define HAVE_vec_pack_trunc_v8df ((TARGET_AVX) && (TARGET_AVX512F))
#define HAVE_vec_pack_trunc_v4df (TARGET_AVX)
#define HAVE_vec_pack_trunc_v2df (TARGET_SSE2)
#define HAVE_vec_pack_sfix_trunc_v8df (TARGET_AVX512F)
#define HAVE_vec_pack_sfix_trunc_v4df (TARGET_AVX)
#define HAVE_vec_pack_sfix_trunc_v2df (TARGET_SSE2)
#define HAVE_vec_pack_ufix_trunc_v8df ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_pack_ufix_trunc_v4df ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_vec_pack_ufix_trunc_v2df (TARGET_SSE2)
#define HAVE_avx512f_vec_pack_sfix_v8df (TARGET_AVX512F)
#define HAVE_vec_pack_sfix_v4df (TARGET_AVX)
#define HAVE_vec_pack_sfix_v2df (TARGET_SSE2)
/* Element movement: movhlps/movlhps, interleave, shufps, partial loads,
   and the generic vec_init / vec_set / vec_extract expanders per vector
   mode (generated file — do not edit).  */
#define HAVE_sse_movhlps_exp (TARGET_SSE)
#define HAVE_sse_movlhps_exp (TARGET_SSE)
#define HAVE_vec_interleave_highv8sf (TARGET_AVX)
#define HAVE_vec_interleave_lowv8sf (TARGET_AVX)
#define HAVE_avx_shufps256 (TARGET_AVX)
#define HAVE_avx_shufps256_mask ((TARGET_AVX512VL) && (TARGET_AVX))
#define HAVE_sse_shufps (TARGET_SSE)
#define HAVE_sse_shufps_mask ((TARGET_AVX512VL) && (TARGET_SSE))
#define HAVE_sse_loadhps_exp (TARGET_SSE)
#define HAVE_sse_loadlps_exp (TARGET_SSE)
/* vec_init: build a vector from scalar elements.  */
#define HAVE_vec_initv16qi (TARGET_SSE)
#define HAVE_vec_initv8hi (TARGET_SSE)
#define HAVE_vec_initv4si (TARGET_SSE)
#define HAVE_vec_initv2di (TARGET_SSE)
#define HAVE_vec_initv4sf (TARGET_SSE)
#define HAVE_vec_initv2df ((TARGET_SSE) && (TARGET_SSE2))
/* vec_set: insert one element into a vector.  */
#define HAVE_vec_setv32qi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv16qi (TARGET_SSE)
#define HAVE_vec_setv16hi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv8hi (TARGET_SSE)
#define HAVE_vec_setv16si ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_setv8si ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv4si (TARGET_SSE)
#define HAVE_vec_setv8di ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_setv4di ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv2di (TARGET_SSE)
#define HAVE_vec_setv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_setv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv4sf (TARGET_SSE)
#define HAVE_vec_setv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_setv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv2df ((TARGET_SSE) && (TARGET_SSE2))
/* Subvector extraction (vextract*) and generic vec_extract.  */
#define HAVE_avx512dq_vextractf64x2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_vextracti64x2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512f_vextractf32x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_vextracti32x4_mask (TARGET_AVX512F)
#define HAVE_avx512dq_vextractf32x8_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_vextracti32x8_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512f_vextractf64x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_vextracti64x4_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vextractf128v8si (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_avx512vl_vextractf128v8sf (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_avx512vl_vextractf128v4di (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_avx512vl_vextractf128v4df (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_avx_vextractf128v32qi (TARGET_AVX)
#define HAVE_avx_vextractf128v16hi (TARGET_AVX)
#define HAVE_avx_vextractf128v8si (TARGET_AVX)
#define HAVE_avx_vextractf128v4di (TARGET_AVX)
#define HAVE_avx_vextractf128v8sf (TARGET_AVX)
#define HAVE_avx_vextractf128v4df (TARGET_AVX)
#define HAVE_vec_extractv64qi ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_vec_extractv32qi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv16qi (TARGET_SSE)
#define HAVE_vec_extractv32hi ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_vec_extractv16hi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv8hi (TARGET_SSE)
#define HAVE_vec_extractv16si ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_extractv8si ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv4si (TARGET_SSE)
#define HAVE_vec_extractv8di ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_extractv4di ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv2di (TARGET_SSE)
#define HAVE_vec_extractv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_extractv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv4sf (TARGET_SSE)
#define HAVE_vec_extractv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_extractv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv2df (TARGET_SSE)
/* DF-mode interleave/duplicate/unpack, vternlog and fixupimm zero-masking
   (_maskz) forms, shufpd, partial DF loads, and masked truncating stores
   (generated file — do not edit).  */
#define HAVE_vec_interleave_highv4df (TARGET_AVX)
#define HAVE_vec_interleave_highv2df (TARGET_SSE2)
#define HAVE_avx512f_movddup512 (TARGET_AVX512F)
#define HAVE_avx512f_movddup512_mask (TARGET_AVX512F)
#define HAVE_avx512f_unpcklpd512 (TARGET_AVX512F)
#define HAVE_avx512f_unpcklpd512_mask (TARGET_AVX512F)
#define HAVE_avx_movddup256 (TARGET_AVX && 1)
#define HAVE_avx_movddup256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_avx_unpcklpd256 (TARGET_AVX && 1)
#define HAVE_avx_unpcklpd256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_vec_interleave_lowv4df (TARGET_AVX)
#define HAVE_vec_interleave_lowv2df (TARGET_SSE2)
/* vternlog: 3-input bitwise ternary logic, zero-masked variants.  */
#define HAVE_avx512f_vternlogv16si_maskz (TARGET_AVX512F)
#define HAVE_avx512vl_vternlogv8si_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv4si_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_vternlogv8di_maskz (TARGET_AVX512F)
#define HAVE_avx512vl_vternlogv4di_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv2di_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_shufps512_mask (TARGET_AVX512F)
/* fixupimm: fix up special FP values, zero-masked and rounding variants.  */
#define HAVE_avx512f_fixupimmv16sf_maskz (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv16sf_maskz_round (TARGET_AVX512F)
#define HAVE_avx512vl_fixupimmv8sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv8sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv4sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv4sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fixupimmv8df_maskz (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv8df_maskz_round (TARGET_AVX512F)
#define HAVE_avx512vl_fixupimmv4df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv4df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv2df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv2df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_sfixupimmv4sf_maskz (TARGET_AVX512F)
#define HAVE_avx512f_sfixupimmv4sf_maskz_round (TARGET_AVX512F)
#define HAVE_avx512f_sfixupimmv2df_maskz ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_sfixupimmv2df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
#define HAVE_avx512f_shufpd512_mask (TARGET_AVX512F)
#define HAVE_avx_shufpd256 (TARGET_AVX)
#define HAVE_avx_shufpd256_mask ((TARGET_AVX512VL) && (TARGET_AVX))
#define HAVE_sse2_shufpd (TARGET_SSE2)
#define HAVE_sse2_shufpd_mask ((TARGET_AVX512VL) && (TARGET_SSE2))
#define HAVE_sse2_loadhpd_exp (TARGET_SSE2)
#define HAVE_sse2_loadlpd_exp (TARGET_SSE2)
/* Masked narrowing stores (ss = signed saturate, us = unsigned saturate,
   plain = truncate).  */
#define HAVE_avx512f_ss_truncatev16siv16qi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_truncatev16siv16qi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev16siv16qi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_ss_truncatev16siv16hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_truncatev16siv16hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev16siv16hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_ss_truncatev8div8si2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_truncatev8div8si2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev8div8si2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_ss_truncatev8div8hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_truncatev8div8hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev8div8hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512bw_ss_truncatev32hiv32qi2_mask_store (TARGET_AVX512BW)
#define HAVE_avx512bw_truncatev32hiv32qi2_mask_store (TARGET_AVX512BW)
#define HAVE_avx512bw_us_truncatev32hiv32qi2_mask_store (TARGET_AVX512BW)
#define HAVE_avx512vl_ss_truncatev4div4si2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4div4si2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4div4si2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev8siv8hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev8siv8hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev8siv8hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev16hiv16qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_truncatev16hiv16qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_us_truncatev16hiv16qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
/* Integer vector negate, add and subtract per element mode, followed by
   their AVX-512 merge-masked (_mask) variants (generated file — do not
   edit).  */
#define HAVE_negv64qi2 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_negv32qi2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_negv16qi2 (TARGET_SSE2)
#define HAVE_negv32hi2 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_negv16hi2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_negv8hi2 (TARGET_SSE2)
#define HAVE_negv16si2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_negv8si2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_negv4si2 (TARGET_SSE2)
#define HAVE_negv8di2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_negv4di2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_negv2di2 (TARGET_SSE2)
#define HAVE_addv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_subv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_addv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_subv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_addv16qi3 (TARGET_SSE2)
#define HAVE_subv16qi3 (TARGET_SSE2)
#define HAVE_addv32hi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_subv32hi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_addv16hi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_subv16hi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_addv8hi3 (TARGET_SSE2)
#define HAVE_subv8hi3 (TARGET_SSE2)
#define HAVE_addv16si3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_subv16si3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_addv8si3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_subv8si3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_addv4si3 (TARGET_SSE2)
#define HAVE_subv4si3 (TARGET_SSE2)
#define HAVE_addv8di3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_subv8di3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_addv4di3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_subv4di3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_addv2di3 (TARGET_SSE2)
#define HAVE_subv2di3 (TARGET_SSE2)
/* Masked add/sub: 512-bit forms need only the base feature; narrower
   forms additionally need AVX512VL.  */
#define HAVE_addv16si3_mask (TARGET_AVX512F)
#define HAVE_subv16si3_mask (TARGET_AVX512F)
#define HAVE_addv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_subv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_addv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_subv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_addv8di3_mask (TARGET_AVX512F)
#define HAVE_subv8di3_mask (TARGET_AVX512F)
#define HAVE_addv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_subv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_addv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_subv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_addv64qi3_mask (TARGET_AVX512BW)
#define HAVE_subv64qi3_mask (TARGET_AVX512BW)
#define HAVE_addv16qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_subv16qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_addv32qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_subv32qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_addv32hi3_mask (TARGET_AVX512BW)
#define HAVE_subv32hi3_mask (TARGET_AVX512BW)
#define HAVE_addv16hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_subv16hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_addv8hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_subv8hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
/* Saturating add/sub (ss/us = signed/unsigned saturation), element-wise
   multiplies, high-part multiplies, and even-lane widening multiplies
   (generated file — do not edit).  The "<bits> == 64" comparisons are
   literal expansions of a vector-size iterator (bytes) and fold to
   constants; masked sub-512-bit forms therefore require AVX512VL.  */
#define HAVE_avx512bw_ssaddv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_ssaddv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_usaddv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_usaddv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_sssubv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_sssubv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_ussubv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_ussubv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_ssaddv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_ssaddv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_usaddv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_usaddv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_sssubv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_sssubv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_ussubv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_ussubv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_ssaddv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_ssaddv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_usaddv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_usaddv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_sssubv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_sssubv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_ussubv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_ussubv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_avx512bw_ssaddv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_ssaddv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_usaddv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_usaddv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_sssubv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_sssubv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_ussubv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_ussubv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_ssaddv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_ssaddv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_usaddv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_usaddv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_sssubv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_sssubv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_ussubv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_ussubv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_ssaddv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_ssaddv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_usaddv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_usaddv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_sssubv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_sssubv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_ussubv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_ussubv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
/* Element-wise multiply (low part).  */
#define HAVE_mulv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_mulv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_mulv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_mulv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_mulv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_mulv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_mulv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_mulv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_mulv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_mulv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_mulv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_mulv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
/* High-part multiplies (s = signed, u = unsigned); conditions continue
   across backslash line splices exactly as emitted by genflags.  */
#define HAVE_smulv32hi3_highpart ((TARGET_SSE2 \
 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_smulv32hi3_highpart_mask ((TARGET_AVX512F) && ((TARGET_SSE2 \
 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_umulv32hi3_highpart ((TARGET_SSE2 \
 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_umulv32hi3_highpart_mask ((TARGET_AVX512F) && ((TARGET_SSE2 \
 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_smulv16hi3_highpart ((TARGET_SSE2 \
 && 1 && 1) && (TARGET_AVX2))
#define HAVE_smulv16hi3_highpart_mask ((TARGET_AVX512F) && ((TARGET_SSE2 \
 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_umulv16hi3_highpart ((TARGET_SSE2 \
 && 1 && 1) && (TARGET_AVX2))
#define HAVE_umulv16hi3_highpart_mask ((TARGET_AVX512F) && ((TARGET_SSE2 \
 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_smulv8hi3_highpart (TARGET_SSE2 \
 && 1 && 1)
#define HAVE_smulv8hi3_highpart_mask ((TARGET_AVX512F) && (TARGET_SSE2 \
 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_umulv8hi3_highpart (TARGET_SSE2 \
 && 1 && 1)
#define HAVE_umulv8hi3_highpart_mask ((TARGET_AVX512F) && (TARGET_SSE2 \
 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
/* Even-lane widening multiplies and multiply-add.  */
#define HAVE_vec_widen_umult_even_v16si (TARGET_AVX512F)
#define HAVE_vec_widen_umult_even_v16si_mask (TARGET_AVX512F)
#define HAVE_vec_widen_umult_even_v8si (TARGET_AVX2 && 1)
#define HAVE_vec_widen_umult_even_v8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_vec_widen_umult_even_v4si (TARGET_SSE2 && 1)
#define HAVE_vec_widen_umult_even_v4si_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_vec_widen_smult_even_v16si (TARGET_AVX512F)
#define HAVE_vec_widen_smult_even_v16si_mask (TARGET_AVX512F)
#define HAVE_vec_widen_smult_even_v8si (TARGET_AVX2 && 1)
#define HAVE_vec_widen_smult_even_v8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_sse4_1_mulv2siv2di3 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_mulv2siv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_avx2_pmaddwd (TARGET_AVX2)
#define HAVE_sse2_pmaddwd (TARGET_SSE2)
#define HAVE_mulv16si3 ((TARGET_SSE2 && 1) && (TARGET_AVX512F))
#define HAVE_mulv16si3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
5408#define HAVE_mulv8si3 ((TARGET_SSE2 && 1) && (TARGET_AVX2))
5409#define HAVE_mulv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
5410#define HAVE_mulv4si3 (TARGET_SSE2 && 1)
5411#define HAVE_mulv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
5412#define HAVE_mulv8di3 ((TARGET_SSE2) && (TARGET_AVX512F))
5413#define HAVE_mulv4di3 ((TARGET_SSE2) && (TARGET_AVX2))
5414#define HAVE_mulv2di3 (TARGET_SSE2)
5415#define HAVE_vec_widen_smult_hi_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
5416#define HAVE_vec_widen_umult_hi_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
5417#define HAVE_vec_widen_smult_hi_v16qi (TARGET_SSE2)
5418#define HAVE_vec_widen_umult_hi_v16qi (TARGET_SSE2)
5419#define HAVE_vec_widen_smult_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
5420#define HAVE_vec_widen_umult_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
5421#define HAVE_vec_widen_smult_hi_v8hi (TARGET_SSE2)
5422#define HAVE_vec_widen_umult_hi_v8hi (TARGET_SSE2)
5423#define HAVE_vec_widen_smult_hi_v8si ((TARGET_SSE2) && (TARGET_AVX2))
5424#define HAVE_vec_widen_umult_hi_v8si ((TARGET_SSE2) && (TARGET_AVX2))
5425#define HAVE_vec_widen_smult_hi_v4si (TARGET_SSE2)
5426#define HAVE_vec_widen_umult_hi_v4si (TARGET_SSE2)
5427#define HAVE_vec_widen_smult_lo_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
5428#define HAVE_vec_widen_umult_lo_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
5429#define HAVE_vec_widen_smult_lo_v16qi (TARGET_SSE2)
5430#define HAVE_vec_widen_umult_lo_v16qi (TARGET_SSE2)
5431#define HAVE_vec_widen_smult_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
5432#define HAVE_vec_widen_umult_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
5433#define HAVE_vec_widen_smult_lo_v8hi (TARGET_SSE2)
5434#define HAVE_vec_widen_umult_lo_v8hi (TARGET_SSE2)
5435#define HAVE_vec_widen_smult_lo_v8si ((TARGET_SSE2) && (TARGET_AVX2))
5436#define HAVE_vec_widen_umult_lo_v8si ((TARGET_SSE2) && (TARGET_AVX2))
5437#define HAVE_vec_widen_smult_lo_v4si (TARGET_SSE2)
5438#define HAVE_vec_widen_umult_lo_v4si (TARGET_SSE2)
5439#define HAVE_vec_widen_smult_even_v4si (TARGET_SSE2)
5440#define HAVE_vec_widen_smult_odd_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
5441#define HAVE_vec_widen_umult_odd_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
5442#define HAVE_vec_widen_smult_odd_v8si ((TARGET_SSE2) && (TARGET_AVX2))
5443#define HAVE_vec_widen_umult_odd_v8si ((TARGET_SSE2) && (TARGET_AVX2))
5444#define HAVE_vec_widen_smult_odd_v4si (TARGET_SSE2)
5445#define HAVE_vec_widen_umult_odd_v4si (TARGET_SSE2)
5446#define HAVE_sdot_prodv32hi ((TARGET_SSE2) && (TARGET_AVX512BW))
5447#define HAVE_sdot_prodv16hi ((TARGET_SSE2) && (TARGET_AVX2))
5448#define HAVE_sdot_prodv8hi (TARGET_SSE2)
5449#define HAVE_sdot_prodv4si (TARGET_XOP)
5450#define HAVE_usadv16qi (TARGET_SSE2)
5451#define HAVE_usadv32qi (TARGET_AVX2)
5452#define HAVE_vec_shl_v16qi (TARGET_SSE2)
5453#define HAVE_vec_shl_v8hi (TARGET_SSE2)
5454#define HAVE_vec_shl_v4si (TARGET_SSE2)
5455#define HAVE_vec_shl_v2di (TARGET_SSE2)
5456#define HAVE_vec_shr_v16qi (TARGET_SSE2)
5457#define HAVE_vec_shr_v8hi (TARGET_SSE2)
5458#define HAVE_vec_shr_v4si (TARGET_SSE2)
5459#define HAVE_vec_shr_v2di (TARGET_SSE2)
5460#define HAVE_smaxv32qi3 (TARGET_AVX2)
5461#define HAVE_sminv32qi3 (TARGET_AVX2)
5462#define HAVE_umaxv32qi3 (TARGET_AVX2)
5463#define HAVE_uminv32qi3 (TARGET_AVX2)
5464#define HAVE_smaxv16hi3 (TARGET_AVX2)
5465#define HAVE_sminv16hi3 (TARGET_AVX2)
5466#define HAVE_umaxv16hi3 (TARGET_AVX2)
5467#define HAVE_uminv16hi3 (TARGET_AVX2)
5468#define HAVE_smaxv8si3 (TARGET_AVX2)
5469#define HAVE_sminv8si3 (TARGET_AVX2)
5470#define HAVE_umaxv8si3 (TARGET_AVX2)
5471#define HAVE_uminv8si3 (TARGET_AVX2)
5472#define HAVE_smaxv64qi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
5473#define HAVE_sminv64qi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
5474#define HAVE_umaxv64qi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
5475#define HAVE_uminv64qi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
5476#define HAVE_smaxv32hi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
5477#define HAVE_sminv32hi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
5478#define HAVE_umaxv32hi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
5479#define HAVE_uminv32hi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
5480#define HAVE_smaxv16si3 ((TARGET_AVX2) && (TARGET_AVX512F))
5481#define HAVE_sminv16si3 ((TARGET_AVX2) && (TARGET_AVX512F))
5482#define HAVE_umaxv16si3 ((TARGET_AVX2) && (TARGET_AVX512F))
5483#define HAVE_uminv16si3 ((TARGET_AVX2) && (TARGET_AVX512F))
5484#define HAVE_smaxv16si3_mask (TARGET_AVX512F)
5485#define HAVE_sminv16si3_mask (TARGET_AVX512F)
5486#define HAVE_umaxv16si3_mask (TARGET_AVX512F)
5487#define HAVE_uminv16si3_mask (TARGET_AVX512F)
5488#define HAVE_smaxv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5489#define HAVE_sminv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5490#define HAVE_umaxv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5491#define HAVE_uminv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5492#define HAVE_smaxv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5493#define HAVE_sminv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5494#define HAVE_umaxv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5495#define HAVE_uminv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5496#define HAVE_smaxv8di3_mask (TARGET_AVX512F)
5497#define HAVE_sminv8di3_mask (TARGET_AVX512F)
5498#define HAVE_umaxv8di3_mask (TARGET_AVX512F)
5499#define HAVE_uminv8di3_mask (TARGET_AVX512F)
5500#define HAVE_smaxv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5501#define HAVE_sminv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5502#define HAVE_umaxv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5503#define HAVE_uminv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5504#define HAVE_smaxv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5505#define HAVE_sminv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5506#define HAVE_umaxv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5507#define HAVE_uminv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
5508#define HAVE_smaxv8di3 ((TARGET_SSE4_2) && (TARGET_AVX512F))
5509#define HAVE_sminv8di3 ((TARGET_SSE4_2) && (TARGET_AVX512F))
5510#define HAVE_umaxv8di3 ((TARGET_SSE4_2) && (TARGET_AVX512F))
5511#define HAVE_uminv8di3 ((TARGET_SSE4_2) && (TARGET_AVX512F))
5512#define HAVE_smaxv4di3 ((TARGET_SSE4_2) && (TARGET_AVX2))
5513#define HAVE_sminv4di3 ((TARGET_SSE4_2) && (TARGET_AVX2))
5514#define HAVE_umaxv4di3 ((TARGET_SSE4_2) && (TARGET_AVX2))
5515#define HAVE_uminv4di3 ((TARGET_SSE4_2) && (TARGET_AVX2))
5516#define HAVE_smaxv2di3 (TARGET_SSE4_2)
5517#define HAVE_sminv2di3 (TARGET_SSE4_2)
5518#define HAVE_umaxv2di3 (TARGET_SSE4_2)
5519#define HAVE_uminv2di3 (TARGET_SSE4_2)
5520#define HAVE_smaxv16qi3 (TARGET_SSE2)
5521#define HAVE_sminv16qi3 (TARGET_SSE2)
5522#define HAVE_smaxv8hi3 (TARGET_SSE2)
5523#define HAVE_sminv8hi3 (TARGET_SSE2)
5524#define HAVE_smaxv4si3 (TARGET_SSE2)
5525#define HAVE_sminv4si3 (TARGET_SSE2)
5526#define HAVE_umaxv16qi3 (TARGET_SSE2)
5527#define HAVE_uminv16qi3 (TARGET_SSE2)
5528#define HAVE_umaxv8hi3 (TARGET_SSE2)
5529#define HAVE_uminv8hi3 (TARGET_SSE2)
5530#define HAVE_umaxv4si3 (TARGET_SSE2)
5531#define HAVE_uminv4si3 (TARGET_SSE2)
5532#define HAVE_avx2_eqv32qi3 (TARGET_AVX2)
5533#define HAVE_avx2_eqv16hi3 (TARGET_AVX2)
5534#define HAVE_avx2_eqv8si3 (TARGET_AVX2)
5535#define HAVE_avx2_eqv4di3 (TARGET_AVX2)
5536#define HAVE_avx512bw_eqv64qi3 (TARGET_AVX512BW)
5537#define HAVE_avx512bw_eqv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
5538#define HAVE_avx512vl_eqv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
5539#define HAVE_avx512vl_eqv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
5540#define HAVE_avx512vl_eqv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
5541#define HAVE_avx512vl_eqv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
5542#define HAVE_avx512bw_eqv32hi3 (TARGET_AVX512BW)
5543#define HAVE_avx512bw_eqv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
5544#define HAVE_avx512vl_eqv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
5545#define HAVE_avx512vl_eqv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
5546#define HAVE_avx512vl_eqv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
5547#define HAVE_avx512vl_eqv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
5548#define HAVE_avx512f_eqv16si3 (TARGET_AVX512F)
5549#define HAVE_avx512f_eqv16si3_mask (TARGET_AVX512F)
5550#define HAVE_avx512vl_eqv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
5551#define HAVE_avx512vl_eqv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
5552#define HAVE_avx512vl_eqv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
5553#define HAVE_avx512vl_eqv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
5554#define HAVE_avx512f_eqv8di3 (TARGET_AVX512F)
5555#define HAVE_avx512f_eqv8di3_mask (TARGET_AVX512F)
5556#define HAVE_avx512vl_eqv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
5557#define HAVE_avx512vl_eqv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
5558#define HAVE_avx512vl_eqv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
5559#define HAVE_avx512vl_eqv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
5560#define HAVE_sse2_eqv16qi3 (TARGET_SSE2 && !TARGET_XOP )
5561#define HAVE_sse2_eqv8hi3 (TARGET_SSE2 && !TARGET_XOP )
5562#define HAVE_sse2_eqv4si3 (TARGET_SSE2 && !TARGET_XOP )
5563#define HAVE_sse4_1_eqv2di3 (TARGET_SSE4_1)
5564#define HAVE_vcondv64qiv16si (TARGET_AVX512F \
5565 && (GET_MODE_NUNITS (V64QImode) \
5566 == GET_MODE_NUNITS (V16SImode)))
5567#define HAVE_vcondv32hiv16si (TARGET_AVX512F \
5568 && (GET_MODE_NUNITS (V32HImode) \
5569 == GET_MODE_NUNITS (V16SImode)))
5570#define HAVE_vcondv16siv16si (TARGET_AVX512F \
5571 && (GET_MODE_NUNITS (V16SImode) \
5572 == GET_MODE_NUNITS (V16SImode)))
5573#define HAVE_vcondv8div16si (TARGET_AVX512F \
5574 && (GET_MODE_NUNITS (V8DImode) \
5575 == GET_MODE_NUNITS (V16SImode)))
5576#define HAVE_vcondv16sfv16si (TARGET_AVX512F \
5577 && (GET_MODE_NUNITS (V16SFmode) \
5578 == GET_MODE_NUNITS (V16SImode)))
5579#define HAVE_vcondv8dfv16si (TARGET_AVX512F \
5580 && (GET_MODE_NUNITS (V8DFmode) \
5581 == GET_MODE_NUNITS (V16SImode)))
5582#define HAVE_vcondv64qiv8di (TARGET_AVX512F \
5583 && (GET_MODE_NUNITS (V64QImode) \
5584 == GET_MODE_NUNITS (V8DImode)))
5585#define HAVE_vcondv32hiv8di (TARGET_AVX512F \
5586 && (GET_MODE_NUNITS (V32HImode) \
5587 == GET_MODE_NUNITS (V8DImode)))
5588#define HAVE_vcondv16siv8di (TARGET_AVX512F \
5589 && (GET_MODE_NUNITS (V16SImode) \
5590 == GET_MODE_NUNITS (V8DImode)))
5591#define HAVE_vcondv8div8di (TARGET_AVX512F \
5592 && (GET_MODE_NUNITS (V8DImode) \
5593 == GET_MODE_NUNITS (V8DImode)))
5594#define HAVE_vcondv16sfv8di (TARGET_AVX512F \
5595 && (GET_MODE_NUNITS (V16SFmode) \
5596 == GET_MODE_NUNITS (V8DImode)))
5597#define HAVE_vcondv8dfv8di (TARGET_AVX512F \
5598 && (GET_MODE_NUNITS (V8DFmode) \
5599 == GET_MODE_NUNITS (V8DImode)))
5600#define HAVE_vcondv64qiv32hi ((TARGET_AVX512F \
5601 && (GET_MODE_NUNITS (V64QImode) \
5602 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5603#define HAVE_vcondv32hiv32hi ((TARGET_AVX512F \
5604 && (GET_MODE_NUNITS (V32HImode) \
5605 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5606#define HAVE_vcondv16siv32hi ((TARGET_AVX512F \
5607 && (GET_MODE_NUNITS (V16SImode) \
5608 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5609#define HAVE_vcondv8div32hi ((TARGET_AVX512F \
5610 && (GET_MODE_NUNITS (V8DImode) \
5611 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5612#define HAVE_vcondv16sfv32hi ((TARGET_AVX512F \
5613 && (GET_MODE_NUNITS (V16SFmode) \
5614 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5615#define HAVE_vcondv8dfv32hi ((TARGET_AVX512F \
5616 && (GET_MODE_NUNITS (V8DFmode) \
5617 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5618#define HAVE_vcondv64qiv64qi ((TARGET_AVX512F \
5619 && (GET_MODE_NUNITS (V64QImode) \
5620 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5621#define HAVE_vcondv32hiv64qi ((TARGET_AVX512F \
5622 && (GET_MODE_NUNITS (V32HImode) \
5623 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5624#define HAVE_vcondv16siv64qi ((TARGET_AVX512F \
5625 && (GET_MODE_NUNITS (V16SImode) \
5626 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5627#define HAVE_vcondv8div64qi ((TARGET_AVX512F \
5628 && (GET_MODE_NUNITS (V8DImode) \
5629 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5630#define HAVE_vcondv16sfv64qi ((TARGET_AVX512F \
5631 && (GET_MODE_NUNITS (V16SFmode) \
5632 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5633#define HAVE_vcondv8dfv64qi ((TARGET_AVX512F \
5634 && (GET_MODE_NUNITS (V8DFmode) \
5635 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5636#define HAVE_vcondv32qiv32qi (TARGET_AVX2 \
5637 && (GET_MODE_NUNITS (V32QImode) \
5638 == GET_MODE_NUNITS (V32QImode)))
5639#define HAVE_vcondv32qiv16hi (TARGET_AVX2 \
5640 && (GET_MODE_NUNITS (V32QImode) \
5641 == GET_MODE_NUNITS (V16HImode)))
5642#define HAVE_vcondv32qiv8si (TARGET_AVX2 \
5643 && (GET_MODE_NUNITS (V32QImode) \
5644 == GET_MODE_NUNITS (V8SImode)))
5645#define HAVE_vcondv32qiv4di (TARGET_AVX2 \
5646 && (GET_MODE_NUNITS (V32QImode) \
5647 == GET_MODE_NUNITS (V4DImode)))
5648#define HAVE_vcondv16hiv32qi (TARGET_AVX2 \
5649 && (GET_MODE_NUNITS (V16HImode) \
5650 == GET_MODE_NUNITS (V32QImode)))
5651#define HAVE_vcondv16hiv16hi (TARGET_AVX2 \
5652 && (GET_MODE_NUNITS (V16HImode) \
5653 == GET_MODE_NUNITS (V16HImode)))
5654#define HAVE_vcondv16hiv8si (TARGET_AVX2 \
5655 && (GET_MODE_NUNITS (V16HImode) \
5656 == GET_MODE_NUNITS (V8SImode)))
5657#define HAVE_vcondv16hiv4di (TARGET_AVX2 \
5658 && (GET_MODE_NUNITS (V16HImode) \
5659 == GET_MODE_NUNITS (V4DImode)))
5660#define HAVE_vcondv8siv32qi (TARGET_AVX2 \
5661 && (GET_MODE_NUNITS (V8SImode) \
5662 == GET_MODE_NUNITS (V32QImode)))
5663#define HAVE_vcondv8siv16hi (TARGET_AVX2 \
5664 && (GET_MODE_NUNITS (V8SImode) \
5665 == GET_MODE_NUNITS (V16HImode)))
5666#define HAVE_vcondv8siv8si (TARGET_AVX2 \
5667 && (GET_MODE_NUNITS (V8SImode) \
5668 == GET_MODE_NUNITS (V8SImode)))
5669#define HAVE_vcondv8siv4di (TARGET_AVX2 \
5670 && (GET_MODE_NUNITS (V8SImode) \
5671 == GET_MODE_NUNITS (V4DImode)))
5672#define HAVE_vcondv4div32qi (TARGET_AVX2 \
5673 && (GET_MODE_NUNITS (V4DImode) \
5674 == GET_MODE_NUNITS (V32QImode)))
5675#define HAVE_vcondv4div16hi (TARGET_AVX2 \
5676 && (GET_MODE_NUNITS (V4DImode) \
5677 == GET_MODE_NUNITS (V16HImode)))
5678#define HAVE_vcondv4div8si (TARGET_AVX2 \
5679 && (GET_MODE_NUNITS (V4DImode) \
5680 == GET_MODE_NUNITS (V8SImode)))
5681#define HAVE_vcondv4div4di (TARGET_AVX2 \
5682 && (GET_MODE_NUNITS (V4DImode) \
5683 == GET_MODE_NUNITS (V4DImode)))
5684#define HAVE_vcondv8sfv32qi (TARGET_AVX2 \
5685 && (GET_MODE_NUNITS (V8SFmode) \
5686 == GET_MODE_NUNITS (V32QImode)))
5687#define HAVE_vcondv8sfv16hi (TARGET_AVX2 \
5688 && (GET_MODE_NUNITS (V8SFmode) \
5689 == GET_MODE_NUNITS (V16HImode)))
5690#define HAVE_vcondv8sfv8si (TARGET_AVX2 \
5691 && (GET_MODE_NUNITS (V8SFmode) \
5692 == GET_MODE_NUNITS (V8SImode)))
5693#define HAVE_vcondv8sfv4di (TARGET_AVX2 \
5694 && (GET_MODE_NUNITS (V8SFmode) \
5695 == GET_MODE_NUNITS (V4DImode)))
5696#define HAVE_vcondv4dfv32qi (TARGET_AVX2 \
5697 && (GET_MODE_NUNITS (V4DFmode) \
5698 == GET_MODE_NUNITS (V32QImode)))
5699#define HAVE_vcondv4dfv16hi (TARGET_AVX2 \
5700 && (GET_MODE_NUNITS (V4DFmode) \
5701 == GET_MODE_NUNITS (V16HImode)))
5702#define HAVE_vcondv4dfv8si (TARGET_AVX2 \
5703 && (GET_MODE_NUNITS (V4DFmode) \
5704 == GET_MODE_NUNITS (V8SImode)))
5705#define HAVE_vcondv4dfv4di (TARGET_AVX2 \
5706 && (GET_MODE_NUNITS (V4DFmode) \
5707 == GET_MODE_NUNITS (V4DImode)))
5708#define HAVE_vcondv16qiv16qi (TARGET_SSE2 \
5709 && (GET_MODE_NUNITS (V16QImode) \
5710 == GET_MODE_NUNITS (V16QImode)))
5711#define HAVE_vcondv8hiv16qi (TARGET_SSE2 \
5712 && (GET_MODE_NUNITS (V8HImode) \
5713 == GET_MODE_NUNITS (V16QImode)))
5714#define HAVE_vcondv4siv16qi (TARGET_SSE2 \
5715 && (GET_MODE_NUNITS (V4SImode) \
5716 == GET_MODE_NUNITS (V16QImode)))
5717#define HAVE_vcondv2div16qi (TARGET_SSE2 \
5718 && (GET_MODE_NUNITS (V2DImode) \
5719 == GET_MODE_NUNITS (V16QImode)))
5720#define HAVE_vcondv4sfv16qi (TARGET_SSE2 \
5721 && (GET_MODE_NUNITS (V4SFmode) \
5722 == GET_MODE_NUNITS (V16QImode)))
5723#define HAVE_vcondv2dfv16qi ((TARGET_SSE2 \
5724 && (GET_MODE_NUNITS (V2DFmode) \
5725 == GET_MODE_NUNITS (V16QImode))) && (TARGET_SSE2))
5726#define HAVE_vcondv16qiv8hi (TARGET_SSE2 \
5727 && (GET_MODE_NUNITS (V16QImode) \
5728 == GET_MODE_NUNITS (V8HImode)))
5729#define HAVE_vcondv8hiv8hi (TARGET_SSE2 \
5730 && (GET_MODE_NUNITS (V8HImode) \
5731 == GET_MODE_NUNITS (V8HImode)))
5732#define HAVE_vcondv4siv8hi (TARGET_SSE2 \
5733 && (GET_MODE_NUNITS (V4SImode) \
5734 == GET_MODE_NUNITS (V8HImode)))
5735#define HAVE_vcondv2div8hi (TARGET_SSE2 \
5736 && (GET_MODE_NUNITS (V2DImode) \
5737 == GET_MODE_NUNITS (V8HImode)))
5738#define HAVE_vcondv4sfv8hi (TARGET_SSE2 \
5739 && (GET_MODE_NUNITS (V4SFmode) \
5740 == GET_MODE_NUNITS (V8HImode)))
5741#define HAVE_vcondv2dfv8hi ((TARGET_SSE2 \
5742 && (GET_MODE_NUNITS (V2DFmode) \
5743 == GET_MODE_NUNITS (V8HImode))) && (TARGET_SSE2))
5744#define HAVE_vcondv16qiv4si (TARGET_SSE2 \
5745 && (GET_MODE_NUNITS (V16QImode) \
5746 == GET_MODE_NUNITS (V4SImode)))
5747#define HAVE_vcondv8hiv4si (TARGET_SSE2 \
5748 && (GET_MODE_NUNITS (V8HImode) \
5749 == GET_MODE_NUNITS (V4SImode)))
5750#define HAVE_vcondv4siv4si (TARGET_SSE2 \
5751 && (GET_MODE_NUNITS (V4SImode) \
5752 == GET_MODE_NUNITS (V4SImode)))
5753#define HAVE_vcondv2div4si (TARGET_SSE2 \
5754 && (GET_MODE_NUNITS (V2DImode) \
5755 == GET_MODE_NUNITS (V4SImode)))
5756#define HAVE_vcondv4sfv4si (TARGET_SSE2 \
5757 && (GET_MODE_NUNITS (V4SFmode) \
5758 == GET_MODE_NUNITS (V4SImode)))
5759#define HAVE_vcondv2dfv4si ((TARGET_SSE2 \
5760 && (GET_MODE_NUNITS (V2DFmode) \
5761 == GET_MODE_NUNITS (V4SImode))) && (TARGET_SSE2))
5762#define HAVE_vcondv2div2di (TARGET_SSE4_2)
5763#define HAVE_vcondv2dfv2di (TARGET_SSE4_2)
5764#define HAVE_vconduv64qiv16si (TARGET_AVX512F \
5765 && (GET_MODE_NUNITS (V64QImode) \
5766 == GET_MODE_NUNITS (V16SImode)))
5767#define HAVE_vconduv32hiv16si (TARGET_AVX512F \
5768 && (GET_MODE_NUNITS (V32HImode) \
5769 == GET_MODE_NUNITS (V16SImode)))
5770#define HAVE_vconduv16siv16si (TARGET_AVX512F \
5771 && (GET_MODE_NUNITS (V16SImode) \
5772 == GET_MODE_NUNITS (V16SImode)))
5773#define HAVE_vconduv8div16si (TARGET_AVX512F \
5774 && (GET_MODE_NUNITS (V8DImode) \
5775 == GET_MODE_NUNITS (V16SImode)))
5776#define HAVE_vconduv16sfv16si (TARGET_AVX512F \
5777 && (GET_MODE_NUNITS (V16SFmode) \
5778 == GET_MODE_NUNITS (V16SImode)))
5779#define HAVE_vconduv8dfv16si (TARGET_AVX512F \
5780 && (GET_MODE_NUNITS (V8DFmode) \
5781 == GET_MODE_NUNITS (V16SImode)))
5782#define HAVE_vconduv64qiv8di (TARGET_AVX512F \
5783 && (GET_MODE_NUNITS (V64QImode) \
5784 == GET_MODE_NUNITS (V8DImode)))
5785#define HAVE_vconduv32hiv8di (TARGET_AVX512F \
5786 && (GET_MODE_NUNITS (V32HImode) \
5787 == GET_MODE_NUNITS (V8DImode)))
5788#define HAVE_vconduv16siv8di (TARGET_AVX512F \
5789 && (GET_MODE_NUNITS (V16SImode) \
5790 == GET_MODE_NUNITS (V8DImode)))
5791#define HAVE_vconduv8div8di (TARGET_AVX512F \
5792 && (GET_MODE_NUNITS (V8DImode) \
5793 == GET_MODE_NUNITS (V8DImode)))
5794#define HAVE_vconduv16sfv8di (TARGET_AVX512F \
5795 && (GET_MODE_NUNITS (V16SFmode) \
5796 == GET_MODE_NUNITS (V8DImode)))
5797#define HAVE_vconduv8dfv8di (TARGET_AVX512F \
5798 && (GET_MODE_NUNITS (V8DFmode) \
5799 == GET_MODE_NUNITS (V8DImode)))
5800#define HAVE_vconduv64qiv32hi ((TARGET_AVX512F \
5801 && (GET_MODE_NUNITS (V64QImode) \
5802 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5803#define HAVE_vconduv32hiv32hi ((TARGET_AVX512F \
5804 && (GET_MODE_NUNITS (V32HImode) \
5805 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5806#define HAVE_vconduv16siv32hi ((TARGET_AVX512F \
5807 && (GET_MODE_NUNITS (V16SImode) \
5808 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5809#define HAVE_vconduv8div32hi ((TARGET_AVX512F \
5810 && (GET_MODE_NUNITS (V8DImode) \
5811 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5812#define HAVE_vconduv16sfv32hi ((TARGET_AVX512F \
5813 && (GET_MODE_NUNITS (V16SFmode) \
5814 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5815#define HAVE_vconduv8dfv32hi ((TARGET_AVX512F \
5816 && (GET_MODE_NUNITS (V8DFmode) \
5817 == GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
5818#define HAVE_vconduv64qiv64qi ((TARGET_AVX512F \
5819 && (GET_MODE_NUNITS (V64QImode) \
5820 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5821#define HAVE_vconduv32hiv64qi ((TARGET_AVX512F \
5822 && (GET_MODE_NUNITS (V32HImode) \
5823 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5824#define HAVE_vconduv16siv64qi ((TARGET_AVX512F \
5825 && (GET_MODE_NUNITS (V16SImode) \
5826 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5827#define HAVE_vconduv8div64qi ((TARGET_AVX512F \
5828 && (GET_MODE_NUNITS (V8DImode) \
5829 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5830#define HAVE_vconduv16sfv64qi ((TARGET_AVX512F \
5831 && (GET_MODE_NUNITS (V16SFmode) \
5832 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5833#define HAVE_vconduv8dfv64qi ((TARGET_AVX512F \
5834 && (GET_MODE_NUNITS (V8DFmode) \
5835 == GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
5836#define HAVE_vconduv32qiv32qi (TARGET_AVX2 \
5837 && (GET_MODE_NUNITS (V32QImode) \
5838 == GET_MODE_NUNITS (V32QImode)))
5839#define HAVE_vconduv32qiv16hi (TARGET_AVX2 \
5840 && (GET_MODE_NUNITS (V32QImode) \
5841 == GET_MODE_NUNITS (V16HImode)))
5842#define HAVE_vconduv32qiv8si (TARGET_AVX2 \
5843 && (GET_MODE_NUNITS (V32QImode) \
5844 == GET_MODE_NUNITS (V8SImode)))
5845#define HAVE_vconduv32qiv4di (TARGET_AVX2 \
5846 && (GET_MODE_NUNITS (V32QImode) \
5847 == GET_MODE_NUNITS (V4DImode)))
5848#define HAVE_vconduv16hiv32qi (TARGET_AVX2 \
5849 && (GET_MODE_NUNITS (V16HImode) \
5850 == GET_MODE_NUNITS (V32QImode)))
5851#define HAVE_vconduv16hiv16hi (TARGET_AVX2 \
5852 && (GET_MODE_NUNITS (V16HImode) \
5853 == GET_MODE_NUNITS (V16HImode)))
5854#define HAVE_vconduv16hiv8si (TARGET_AVX2 \
5855 && (GET_MODE_NUNITS (V16HImode) \
5856 == GET_MODE_NUNITS (V8SImode)))
5857#define HAVE_vconduv16hiv4di (TARGET_AVX2 \
5858 && (GET_MODE_NUNITS (V16HImode) \
5859 == GET_MODE_NUNITS (V4DImode)))
5860#define HAVE_vconduv8siv32qi (TARGET_AVX2 \
5861 && (GET_MODE_NUNITS (V8SImode) \
5862 == GET_MODE_NUNITS (V32QImode)))
5863#define HAVE_vconduv8siv16hi (TARGET_AVX2 \
5864 && (GET_MODE_NUNITS (V8SImode) \
5865 == GET_MODE_NUNITS (V16HImode)))
5866#define HAVE_vconduv8siv8si (TARGET_AVX2 \
5867 && (GET_MODE_NUNITS (V8SImode) \
5868 == GET_MODE_NUNITS (V8SImode)))
5869#define HAVE_vconduv8siv4di (TARGET_AVX2 \
5870 && (GET_MODE_NUNITS (V8SImode) \
5871 == GET_MODE_NUNITS (V4DImode)))
5872#define HAVE_vconduv4div32qi (TARGET_AVX2 \
5873 && (GET_MODE_NUNITS (V4DImode) \
5874 == GET_MODE_NUNITS (V32QImode)))
5875#define HAVE_vconduv4div16hi (TARGET_AVX2 \
5876 && (GET_MODE_NUNITS (V4DImode) \
5877 == GET_MODE_NUNITS (V16HImode)))
5878#define HAVE_vconduv4div8si (TARGET_AVX2 \
5879 && (GET_MODE_NUNITS (V4DImode) \
5880 == GET_MODE_NUNITS (V8SImode)))
5881#define HAVE_vconduv4div4di (TARGET_AVX2 \
5882 && (GET_MODE_NUNITS (V4DImode) \
5883 == GET_MODE_NUNITS (V4DImode)))
5884#define HAVE_vconduv8sfv32qi (TARGET_AVX2 \
5885 && (GET_MODE_NUNITS (V8SFmode) \
5886 == GET_MODE_NUNITS (V32QImode)))
5887#define HAVE_vconduv8sfv16hi (TARGET_AVX2 \
5888 && (GET_MODE_NUNITS (V8SFmode) \
5889 == GET_MODE_NUNITS (V16HImode)))
5890#define HAVE_vconduv8sfv8si (TARGET_AVX2 \
5891 && (GET_MODE_NUNITS (V8SFmode) \
5892 == GET_MODE_NUNITS (V8SImode)))
5893#define HAVE_vconduv8sfv4di (TARGET_AVX2 \
5894 && (GET_MODE_NUNITS (V8SFmode) \
5895 == GET_MODE_NUNITS (V4DImode)))
5896#define HAVE_vconduv4dfv32qi (TARGET_AVX2 \
5897 && (GET_MODE_NUNITS (V4DFmode) \
5898 == GET_MODE_NUNITS (V32QImode)))
5899#define HAVE_vconduv4dfv16hi (TARGET_AVX2 \
5900 && (GET_MODE_NUNITS (V4DFmode) \
5901 == GET_MODE_NUNITS (V16HImode)))
5902#define HAVE_vconduv4dfv8si (TARGET_AVX2 \
5903 && (GET_MODE_NUNITS (V4DFmode) \
5904 == GET_MODE_NUNITS (V8SImode)))
5905#define HAVE_vconduv4dfv4di (TARGET_AVX2 \
5906 && (GET_MODE_NUNITS (V4DFmode) \
5907 == GET_MODE_NUNITS (V4DImode)))
5908#define HAVE_vconduv16qiv16qi (TARGET_SSE2 \
5909 && (GET_MODE_NUNITS (V16QImode) \
5910 == GET_MODE_NUNITS (V16QImode)))
5911#define HAVE_vconduv8hiv16qi (TARGET_SSE2 \
5912 && (GET_MODE_NUNITS (V8HImode) \
5913 == GET_MODE_NUNITS (V16QImode)))
5914#define HAVE_vconduv4siv16qi (TARGET_SSE2 \
5915 && (GET_MODE_NUNITS (V4SImode) \
5916 == GET_MODE_NUNITS (V16QImode)))
5917#define HAVE_vconduv2div16qi (TARGET_SSE2 \
5918 && (GET_MODE_NUNITS (V2DImode) \
5919 == GET_MODE_NUNITS (V16QImode)))
5920#define HAVE_vconduv4sfv16qi (TARGET_SSE2 \
5921 && (GET_MODE_NUNITS (V4SFmode) \
5922 == GET_MODE_NUNITS (V16QImode)))
5923#define HAVE_vconduv2dfv16qi ((TARGET_SSE2 \
5924 && (GET_MODE_NUNITS (V2DFmode) \
5925 == GET_MODE_NUNITS (V16QImode))) && (TARGET_SSE2))
5926#define HAVE_vconduv16qiv8hi (TARGET_SSE2 \
5927 && (GET_MODE_NUNITS (V16QImode) \
5928 == GET_MODE_NUNITS (V8HImode)))
5929#define HAVE_vconduv8hiv8hi (TARGET_SSE2 \
5930 && (GET_MODE_NUNITS (V8HImode) \
5931 == GET_MODE_NUNITS (V8HImode)))
5932#define HAVE_vconduv4siv8hi (TARGET_SSE2 \
5933 && (GET_MODE_NUNITS (V4SImode) \
5934 == GET_MODE_NUNITS (V8HImode)))
5935#define HAVE_vconduv2div8hi (TARGET_SSE2 \
5936 && (GET_MODE_NUNITS (V2DImode) \
5937 == GET_MODE_NUNITS (V8HImode)))
5938#define HAVE_vconduv4sfv8hi (TARGET_SSE2 \
5939 && (GET_MODE_NUNITS (V4SFmode) \
5940 == GET_MODE_NUNITS (V8HImode)))
5941#define HAVE_vconduv2dfv8hi ((TARGET_SSE2 \
5942 && (GET_MODE_NUNITS (V2DFmode) \
5943 == GET_MODE_NUNITS (V8HImode))) && (TARGET_SSE2))
5944#define HAVE_vconduv16qiv4si (TARGET_SSE2 \
5945 && (GET_MODE_NUNITS (V16QImode) \
5946 == GET_MODE_NUNITS (V4SImode)))
5947#define HAVE_vconduv8hiv4si (TARGET_SSE2 \
5948 && (GET_MODE_NUNITS (V8HImode) \
5949 == GET_MODE_NUNITS (V4SImode)))
5950#define HAVE_vconduv4siv4si (TARGET_SSE2 \
5951 && (GET_MODE_NUNITS (V4SImode) \
5952 == GET_MODE_NUNITS (V4SImode)))
5953#define HAVE_vconduv2div4si (TARGET_SSE2 \
5954 && (GET_MODE_NUNITS (V2DImode) \
5955 == GET_MODE_NUNITS (V4SImode)))
5956#define HAVE_vconduv4sfv4si (TARGET_SSE2 \
5957 && (GET_MODE_NUNITS (V4SFmode) \
5958 == GET_MODE_NUNITS (V4SImode)))
5959#define HAVE_vconduv2dfv4si ((TARGET_SSE2 \
5960 && (GET_MODE_NUNITS (V2DFmode) \
5961 == GET_MODE_NUNITS (V4SImode))) && (TARGET_SSE2))
5962#define HAVE_vconduv2div2di (TARGET_SSE4_2)
5963#define HAVE_vconduv2dfv2di (TARGET_SSE4_2)
5964#define HAVE_vcondeqv2div2di (TARGET_SSE4_1)
5965#define HAVE_vcondeqv2dfv2di (TARGET_SSE4_1)
5966#define HAVE_vec_permv16qi (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
5967#define HAVE_vec_permv8hi (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
5968#define HAVE_vec_permv4si (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
5969#define HAVE_vec_permv2di (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
5970#define HAVE_vec_permv4sf (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
5971#define HAVE_vec_permv2df (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
5972#define HAVE_vec_permv32qi ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
5973#define HAVE_vec_permv16hi ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
5974#define HAVE_vec_permv8si ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
5975#define HAVE_vec_permv4di ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
5976#define HAVE_vec_permv8sf ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
5977#define HAVE_vec_permv4df ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
/* Availability predicates for named insn patterns, generated by genflags
   from the i386 machine description (see the file header).  Each
   HAVE_<pattern> macro expands to the pattern's enabling condition as
   written in the `md' file; redundant-looking terms such as
   "&& 1 && 1" or "(16 == 64 || ...)" are constant-folded residue of
   mode-iterator expansion left verbatim by the generator.
   DO NOT hand-edit: changes belong in the machine description and are
   re-emitted by genflags at build time.  */

/* Variable and constant vector permutes.  */
#define HAVE_vec_permv16sf ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_vec_permv8df ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_vec_permv16si ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_vec_permv8di ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_vec_permv32hi ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512BW))
#define HAVE_vec_permv64qi ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512VBMI))
#define HAVE_vec_perm_constv4sf (TARGET_SSE)
#define HAVE_vec_perm_constv4si (TARGET_SSE)
#define HAVE_vec_perm_constv2df (TARGET_SSE)
#define HAVE_vec_perm_constv2di (TARGET_SSE)
#define HAVE_vec_perm_constv16qi (TARGET_SSE2)
#define HAVE_vec_perm_constv8hi (TARGET_SSE2)
#define HAVE_vec_perm_constv8sf (TARGET_AVX)
#define HAVE_vec_perm_constv4df (TARGET_AVX)
#define HAVE_vec_perm_constv8si (TARGET_AVX)
#define HAVE_vec_perm_constv4di (TARGET_AVX)
#define HAVE_vec_perm_constv32qi (TARGET_AVX2)
#define HAVE_vec_perm_constv16hi (TARGET_AVX2)
#define HAVE_vec_perm_constv16si (TARGET_AVX512F)
#define HAVE_vec_perm_constv8di (TARGET_AVX512F)
#define HAVE_vec_perm_constv16sf (TARGET_AVX512F)
#define HAVE_vec_perm_constv8df (TARGET_AVX512F)
#define HAVE_vec_perm_constv32hi (TARGET_AVX512BW)
#define HAVE_vec_perm_constv64qi (TARGET_AVX512BW)

/* Vector integer one's complement and bitwise logic.  */
#define HAVE_one_cmplv16si2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_one_cmplv8di2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_one_cmplv64qi2 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_one_cmplv32qi2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_one_cmplv16qi2 (TARGET_SSE)
#define HAVE_one_cmplv32hi2 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_one_cmplv16hi2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_one_cmplv8hi2 (TARGET_SSE)
#define HAVE_one_cmplv8si2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_one_cmplv4si2 (TARGET_SSE)
#define HAVE_one_cmplv4di2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_one_cmplv2di2 (TARGET_SSE)
#define HAVE_avx512bw_andnotv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_avx2_andnotv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_andnotv16qi3 (TARGET_SSE2)
#define HAVE_avx512bw_andnotv32hi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_avx2_andnotv16hi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_andnotv8hi3 (TARGET_SSE2)
#define HAVE_avx512f_andnotv16si3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_avx2_andnotv8si3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_andnotv4si3 (TARGET_SSE2)
#define HAVE_avx512f_andnotv8di3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_avx2_andnotv4di3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_andnotv2di3 (TARGET_SSE2)
#define HAVE_avx512f_andnotv16si3_mask (TARGET_AVX512F)
#define HAVE_avx2_andnotv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sse2_andnotv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_andnotv8di3_mask (TARGET_AVX512F)
#define HAVE_avx2_andnotv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sse2_andnotv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_andnotv64qi3_mask (TARGET_AVX512BW)
#define HAVE_sse2_andnotv16qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx2_andnotv32qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_andnotv32hi3_mask (TARGET_AVX512BW)
#define HAVE_avx2_andnotv16hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_sse2_andnotv8hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_andv16si3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_iorv16si3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_xorv16si3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_andv8di3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_iorv8di3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_xorv8di3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_andv64qi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_iorv64qi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_xorv64qi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_andv32qi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_iorv32qi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_xorv32qi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_andv16qi3 (TARGET_SSE)
#define HAVE_iorv16qi3 (TARGET_SSE)
#define HAVE_xorv16qi3 (TARGET_SSE)
#define HAVE_andv32hi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_iorv32hi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_xorv32hi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_andv16hi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_iorv16hi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_xorv16hi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_andv8hi3 (TARGET_SSE)
#define HAVE_iorv8hi3 (TARGET_SSE)
#define HAVE_xorv8hi3 (TARGET_SSE)
#define HAVE_andv8si3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_iorv8si3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_xorv8si3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_andv4si3 (TARGET_SSE)
#define HAVE_iorv4si3 (TARGET_SSE)
#define HAVE_xorv4si3 (TARGET_SSE)
#define HAVE_andv4di3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_iorv4di3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_xorv4di3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_andv2di3 (TARGET_SSE)
#define HAVE_iorv2di3 (TARGET_SSE)
#define HAVE_xorv2di3 (TARGET_SSE)

/* Pack/truncate and interleave patterns.  */
#define HAVE_vec_pack_trunc_v32hi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_pack_trunc_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_pack_trunc_v8hi (TARGET_SSE2)
#define HAVE_vec_pack_trunc_v16si ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_pack_trunc_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_pack_trunc_v4si (TARGET_SSE2)
#define HAVE_vec_pack_trunc_v8di ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_pack_trunc_v4di ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_pack_trunc_v2di (TARGET_SSE2)
#define HAVE_vec_pack_trunc_qi (TARGET_AVX512F)
#define HAVE_vec_pack_trunc_hi (TARGET_AVX512BW)
#define HAVE_vec_pack_trunc_si (TARGET_AVX512BW)
#define HAVE_vec_interleave_highv32qi (TARGET_AVX2)
#define HAVE_vec_interleave_highv16hi (TARGET_AVX2)
#define HAVE_vec_interleave_highv8si (TARGET_AVX2)
#define HAVE_vec_interleave_highv4di (TARGET_AVX2)
#define HAVE_vec_interleave_lowv32qi (TARGET_AVX2)
#define HAVE_vec_interleave_lowv16hi (TARGET_AVX2)
#define HAVE_vec_interleave_lowv8si (TARGET_AVX2)
#define HAVE_vec_interleave_lowv4di (TARGET_AVX2)

/* Lane insert, lane shuffle, and pshuf* patterns.  */
#define HAVE_avx512dq_vinsertf64x2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_vinserti64x2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512f_vinsertf32x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_vinserti32x4_mask (TARGET_AVX512F)
#define HAVE_avx512dq_vinsertf32x8_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_vinserti32x8_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512f_vinsertf64x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_vinserti64x4_mask (TARGET_AVX512F)
#define HAVE_avx512dq_shuf_i64x2_mask (TARGET_AVX512DQ)
#define HAVE_avx512dq_shuf_f64x2_mask (TARGET_AVX512DQ)
#define HAVE_avx512f_shuf_f64x2_mask (TARGET_AVX512F)
#define HAVE_avx512f_shuf_i64x2_mask (TARGET_AVX512F)
#define HAVE_avx512vl_shuf_i32x4_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_shuf_f32x4_mask (TARGET_AVX512VL)
#define HAVE_avx512f_shuf_f32x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_shuf_i32x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_pshufdv3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_pshufdv3_mask (TARGET_AVX512VL)
#define HAVE_avx2_pshufdv3 (TARGET_AVX2)
#define HAVE_avx512vl_pshufd_mask (TARGET_AVX512VL)
#define HAVE_sse2_pshufd (TARGET_SSE2)
#define HAVE_avx512vl_pshuflwv3_mask (TARGET_AVX512VL && TARGET_AVX512BW)
#define HAVE_avx2_pshuflwv3 (TARGET_AVX2)
#define HAVE_avx512vl_pshuflw_mask (TARGET_AVX512VL && TARGET_AVX512BW)
#define HAVE_sse2_pshuflw (TARGET_SSE2)
#define HAVE_avx2_pshufhwv3 (TARGET_AVX2)
#define HAVE_avx512vl_pshufhwv3_mask (TARGET_AVX512VL && TARGET_AVX512BW)
#define HAVE_avx512vl_pshufhw_mask (TARGET_AVX512VL && TARGET_AVX512BW)
#define HAVE_sse2_pshufhw (TARGET_SSE2)
#define HAVE_sse2_loadd (TARGET_SSE)

/* Sign/zero-extending vector unpack (lo/hi) patterns.  */
#define HAVE_vec_unpacks_lo_v64qi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_unpacks_lo_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_lo_v16qi (TARGET_SSE2)
#define HAVE_vec_unpacks_lo_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_lo_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacks_lo_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_lo_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_lo_v4si (TARGET_SSE2)
#define HAVE_vec_unpacks_hi_v64qi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_unpacks_hi_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_hi_v16qi (TARGET_SSE2)
#define HAVE_vec_unpacks_hi_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_hi_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacks_hi_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_hi_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_hi_v4si (TARGET_SSE2)
#define HAVE_vec_unpacku_lo_v64qi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_unpacku_lo_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_lo_v16qi (TARGET_SSE2)
#define HAVE_vec_unpacku_lo_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_lo_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacku_lo_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_lo_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_lo_v4si (TARGET_SSE2)
#define HAVE_vec_unpacks_lo_hi (TARGET_AVX512F)
#define HAVE_vec_unpacks_lo_si (TARGET_AVX512F)
#define HAVE_vec_unpacks_lo_di (TARGET_AVX512BW)
#define HAVE_vec_unpacku_hi_v64qi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_unpacku_hi_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_hi_v16qi (TARGET_SSE2)
#define HAVE_vec_unpacku_hi_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_hi_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacku_hi_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_hi_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_hi_v4si (TARGET_SSE2)
#define HAVE_vec_unpacks_hi_hi (TARGET_AVX512F)
#define HAVE_vec_unpacks_hi_si (TARGET_AVX512BW)
#define HAVE_vec_unpacks_hi_di (TARGET_AVX512BW)

/* Unsigned average; the "1 && 1" and "<N> == 64" terms are folded
   iterator conditions emitted verbatim by the generator.  */
#define HAVE_avx512bw_uavgv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_uavgv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_uavgv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_uavgv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_uavgv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_uavgv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_avx512bw_uavgv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_uavgv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_uavgv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_uavgv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_uavgv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_uavgv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_maskmovdqu (TARGET_SSE2)
#define HAVE_ssse3_pmulhrswv4hi3_mask (TARGET_AVX512BW && TARGET_AVX512VL)
#define HAVE_ssse3_pmulhrswv8hi3_mask (TARGET_AVX512BW && TARGET_AVX512VL)
#define HAVE_avx2_pmulhrswv16hi3_mask ((TARGET_AVX512BW && TARGET_AVX512VL) && (TARGET_AVX2))
#define HAVE_ssse3_pmulhrswv4hi3 (TARGET_AVX2)
#define HAVE_ssse3_pmulhrswv8hi3 (TARGET_AVX2)
#define HAVE_avx2_pmulhrswv16hi3 (TARGET_AVX2)

/* Vector absolute value.  */
#define HAVE_absv64qi2 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_absv32qi2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_absv16qi2 (TARGET_SSE2)
#define HAVE_absv32hi2 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_absv16hi2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_absv8hi2 (TARGET_SSE2)
#define HAVE_absv16si2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_absv8si2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_absv4si2 (TARGET_SSE2)
#define HAVE_absv8di2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_absv4di2 ((TARGET_SSE2) && (TARGET_AVX512VL))
#define HAVE_absv2di2 ((TARGET_SSE2) && (TARGET_AVX512VL))
#define HAVE_avx2_pblendw (TARGET_AVX2)

/* Rounding patterns.  */
#define HAVE_avx_roundps_sfix256 ((TARGET_ROUND) && (TARGET_AVX))
#define HAVE_sse4_1_roundps_sfix (TARGET_ROUND)
#define HAVE_avx512f_roundps512 (TARGET_AVX512F)
#define HAVE_avx512f_roundpd512 (TARGET_AVX512F)
#define HAVE_avx512f_roundps512_sfix (TARGET_AVX512F)
#define HAVE_avx512f_roundpd_vec_pack_sfix512 ((TARGET_ROUND) && (TARGET_AVX512F))
#define HAVE_avx_roundpd_vec_pack_sfix256 ((TARGET_ROUND) && (TARGET_AVX))
#define HAVE_sse4_1_roundpd_vec_pack_sfix (TARGET_ROUND)
#define HAVE_roundv16sf2 ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX512F))
#define HAVE_roundv8sf2 ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX))
#define HAVE_roundv4sf2 (TARGET_ROUND && !flag_trapping_math)
#define HAVE_roundv8df2 ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX512F))
#define HAVE_roundv4df2 ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX))
#define HAVE_roundv2df2 ((TARGET_ROUND && !flag_trapping_math) && (TARGET_SSE2))
#define HAVE_roundv16sf2_sfix ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX512F))
#define HAVE_roundv8sf2_sfix ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX))
#define HAVE_roundv4sf2_sfix (TARGET_ROUND && !flag_trapping_math)
#define HAVE_roundv8df2_vec_pack_sfix ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX512F))
#define HAVE_roundv4df2_vec_pack_sfix ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX))
#define HAVE_roundv2df2_vec_pack_sfix (TARGET_ROUND && !flag_trapping_math)

/* AVX-512PF gather/scatter prefetch.  */
#define HAVE_avx512pf_gatherpfv16sisf (TARGET_AVX512PF)
#define HAVE_avx512pf_gatherpfv8disf (TARGET_AVX512PF)
#define HAVE_avx512pf_gatherpfv8sidf (TARGET_AVX512PF)
#define HAVE_avx512pf_gatherpfv8didf (TARGET_AVX512PF)
#define HAVE_avx512pf_scatterpfv16sisf (TARGET_AVX512PF)
#define HAVE_avx512pf_scatterpfv8disf (TARGET_AVX512PF)
#define HAVE_avx512pf_scatterpfv8sidf (TARGET_AVX512PF)
#define HAVE_avx512pf_scatterpfv8didf (TARGET_AVX512PF)

/* Vector rotates and per-element shifts.  */
#define HAVE_rotlv16qi3 (TARGET_XOP)
#define HAVE_rotlv8hi3 (TARGET_XOP)
#define HAVE_rotlv4si3 (TARGET_XOP)
#define HAVE_rotlv2di3 (TARGET_XOP)
#define HAVE_rotrv16qi3 (TARGET_XOP)
#define HAVE_rotrv8hi3 (TARGET_XOP)
#define HAVE_rotrv4si3 (TARGET_XOP)
#define HAVE_rotrv2di3 (TARGET_XOP)
#define HAVE_vrotrv16qi3 (TARGET_XOP)
#define HAVE_vrotrv8hi3 (TARGET_XOP)
#define HAVE_vrotrv4si3 (TARGET_XOP)
#define HAVE_vrotrv2di3 (TARGET_XOP)
#define HAVE_vrotlv16qi3 (TARGET_XOP)
#define HAVE_vrotlv8hi3 (TARGET_XOP)
#define HAVE_vrotlv4si3 (TARGET_XOP)
#define HAVE_vrotlv2di3 (TARGET_XOP)
#define HAVE_vlshrv16qi3 (TARGET_XOP)
#define HAVE_vlshrv8hi3 (TARGET_XOP)
#define HAVE_vlshrv4si3 (TARGET_AVX2 || TARGET_XOP)
#define HAVE_vlshrv2di3 (TARGET_AVX2 || TARGET_XOP)
#define HAVE_vlshrv16si3 (TARGET_AVX512F)
#define HAVE_vlshrv8di3 (TARGET_AVX512F)
#define HAVE_vlshrv8si3 (TARGET_AVX2)
#define HAVE_vlshrv4di3 (TARGET_AVX2)
#define HAVE_vashrv8hi3 (TARGET_XOP || (TARGET_AVX512BW && TARGET_AVX512VL))
#define HAVE_vashrv8hi3_mask ((TARGET_AVX512F) && (TARGET_XOP || (TARGET_AVX512BW && TARGET_AVX512VL)))
#define HAVE_vashrv16qi3 (TARGET_XOP)
#define HAVE_vashrv2di3 (TARGET_XOP || TARGET_AVX512VL)
#define HAVE_vashrv2di3_mask ((TARGET_AVX512F) && (TARGET_XOP || TARGET_AVX512VL))
#define HAVE_vashrv4si3 (TARGET_AVX2 || TARGET_XOP)
#define HAVE_vashrv16si3 (TARGET_AVX512F)
#define HAVE_vashrv8si3 (TARGET_AVX2)
#define HAVE_vashlv16qi3 (TARGET_XOP)
#define HAVE_vashlv8hi3 (TARGET_XOP)
#define HAVE_vashlv4si3 (TARGET_AVX2 || TARGET_XOP)
#define HAVE_vashlv2di3 (TARGET_AVX2 || TARGET_XOP)
#define HAVE_vashlv16si3 (TARGET_AVX512F)
#define HAVE_vashlv8di3 (TARGET_AVX512F)
#define HAVE_vashlv8si3 (TARGET_AVX2)
#define HAVE_vashlv4di3 (TARGET_AVX2)
#define HAVE_ashlv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_lshrv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_ashrv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_ashlv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_lshrv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_ashrv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_ashlv16qi3 (TARGET_SSE2)
#define HAVE_lshrv16qi3 (TARGET_SSE2)
#define HAVE_ashrv16qi3 (TARGET_SSE2)
#define HAVE_ashrv2di3 (TARGET_XOP || TARGET_AVX512VL)
#define HAVE_xop_vmfrczv4sf2 (TARGET_XOP)
#define HAVE_xop_vmfrczv2df2 ((TARGET_XOP) && (TARGET_SSE2))
#define HAVE_avx_vzeroall (TARGET_AVX)

/* Full-width permutes and vpermil*; same folded-iterator residue as
   the uavg group above.  */
#define HAVE_avx2_permv4di (TARGET_AVX2)
#define HAVE_avx2_permv4df (TARGET_AVX2)
#define HAVE_avx512vl_permv4di_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_permv4df_mask (TARGET_AVX512VL)
#define HAVE_avx512f_permv8df (TARGET_AVX512F)
#define HAVE_avx512f_permv8di (TARGET_AVX512F)
#define HAVE_avx512f_permv8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_permv8di_mask (TARGET_AVX512F)
#define HAVE_avx512f_vpermilv8df ((TARGET_AVX && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_vpermilv8df_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx_vpermilv4df ((TARGET_AVX && 1) && (TARGET_AVX))
#define HAVE_avx_vpermilv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_avx_vpermilv2df (TARGET_AVX && 1)
#define HAVE_avx_vpermilv2df_mask ((TARGET_AVX512F) && (TARGET_AVX && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_vpermilv16sf ((TARGET_AVX && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_vpermilv16sf_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx_vpermilv8sf ((TARGET_AVX && 1) && (TARGET_AVX))
#define HAVE_avx_vpermilv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_avx_vpermilv4sf (TARGET_AVX && 1)
#define HAVE_avx_vpermilv4sf_mask ((TARGET_AVX512F) && (TARGET_AVX && (16 == 64 || TARGET_AVX512VL)))

/* Two-source variable permutes (vpermi2/vpermt2), zero-masked forms.  */
#define HAVE_avx512f_vpermi2varv16si3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv16sf3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv8di3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv8df3_maskz (TARGET_AVX512F)
#define HAVE_avx512vl_vpermi2varv8si3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8sf3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4di3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4df3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4si3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4sf3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv2di3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv2df3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermi2varv64qi3_maskz (TARGET_AVX512VBMI)
#define HAVE_avx512vl_vpermi2varv16qi3_maskz ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv32qi3_maskz ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8hi3_maskz ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv16hi3_maskz ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermi2varv32hi3_maskz (TARGET_AVX512BW)
#define HAVE_avx512f_vpermt2varv16si3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv16sf3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv8di3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv8df3_maskz (TARGET_AVX512F)
#define HAVE_avx512vl_vpermt2varv8si3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8sf3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4di3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4df3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4si3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4sf3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv2di3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv2df3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermt2varv64qi3_maskz (TARGET_AVX512VBMI)
#define HAVE_avx512vl_vpermt2varv16qi3_maskz ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv32qi3_maskz ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8hi3_maskz ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv16hi3_maskz ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermt2varv32hi3_maskz (TARGET_AVX512BW)
#define HAVE_avx_vperm2f128v8si3 (TARGET_AVX)
#define HAVE_avx_vperm2f128v8sf3 (TARGET_AVX)
#define HAVE_avx_vperm2f128v4df3 (TARGET_AVX)
#define HAVE_avx512vl_vinsertv8si (TARGET_AVX512VL)
#define HAVE_avx512vl_vinsertv8sf (TARGET_AVX512VL)
#define HAVE_avx512vl_vinsertv4di (TARGET_AVX512VL)
#define HAVE_avx512vl_vinsertv4df (TARGET_AVX512VL)
#define HAVE_avx_vinsertf128v32qi (TARGET_AVX)
#define HAVE_avx_vinsertf128v16hi (TARGET_AVX)
#define HAVE_avx_vinsertf128v8si (TARGET_AVX)
#define HAVE_avx_vinsertf128v4di (TARGET_AVX)
#define HAVE_avx_vinsertf128v8sf (TARGET_AVX)
#define HAVE_avx_vinsertf128v4df (TARGET_AVX)

/* Masked vector loads and stores.  */
#define HAVE_maskloadv4sfv4si (TARGET_AVX)
#define HAVE_maskloadv2dfv2di (TARGET_AVX)
#define HAVE_maskloadv8sfv8si (TARGET_AVX)
#define HAVE_maskloadv4dfv4di (TARGET_AVX)
#define HAVE_maskloadv4siv4si ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskloadv2div2di ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskloadv8siv8si ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskloadv4div4di ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskloadv16sihi (TARGET_AVX512F)
#define HAVE_maskloadv8siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv4siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv8diqi (TARGET_AVX512F)
#define HAVE_maskloadv4diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv2diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv16sfhi (TARGET_AVX512F)
#define HAVE_maskloadv8sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv4sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv8dfqi (TARGET_AVX512F)
#define HAVE_maskloadv4dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv2dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv64qidi (TARGET_AVX512BW)
#define HAVE_maskloadv16qihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskloadv32qisi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskloadv32hisi (TARGET_AVX512BW)
#define HAVE_maskloadv16hihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskloadv8hiqi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskstorev4sfv4si (TARGET_AVX)
#define HAVE_maskstorev2dfv2di (TARGET_AVX)
#define HAVE_maskstorev8sfv8si (TARGET_AVX)
#define HAVE_maskstorev4dfv4di (TARGET_AVX)
#define HAVE_maskstorev4siv4si ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskstorev2div2di ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskstorev8siv8si ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskstorev4div4di ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskstorev16sihi (TARGET_AVX512F)
#define HAVE_maskstorev8siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev4siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev8diqi (TARGET_AVX512F)
#define HAVE_maskstorev4diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev2diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev16sfhi (TARGET_AVX512F)
#define HAVE_maskstorev8sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev4sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev8dfqi (TARGET_AVX512F)
#define HAVE_maskstorev4dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev2dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev64qidi (TARGET_AVX512BW)
#define HAVE_maskstorev16qihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskstorev32qisi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskstorev32hisi (TARGET_AVX512BW)
#define HAVE_maskstorev16hihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskstorev8hiqi ((TARGET_AVX512BW) && (TARGET_AVX512VL))

/* Vector compare-and-branch and vector initialization.  */
#define HAVE_cbranchv4si4 (TARGET_SSE4_1)
#define HAVE_cbranchv2di4 (TARGET_SSE4_1)
#define HAVE_cbranchv8si4 ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_cbranchv4di4 ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_vec_initv32qi (TARGET_AVX)
#define HAVE_vec_initv16hi (TARGET_AVX)
#define HAVE_vec_initv8si (TARGET_AVX)
#define HAVE_vec_initv4di (TARGET_AVX)
#define HAVE_vec_initv8sf (TARGET_AVX)
#define HAVE_vec_initv4df (TARGET_AVX)
#define HAVE_vec_initv16si (TARGET_AVX512F)
#define HAVE_vec_initv16sf (TARGET_AVX512F)
#define HAVE_vec_initv8di (TARGET_AVX512F)
#define HAVE_vec_initv8df (TARGET_AVX512F)
#define HAVE_vec_initv32hi (TARGET_AVX512F)
#define HAVE_vec_initv64qi (TARGET_AVX512F)
#define HAVE_vcvtps2ph_mask (TARGET_AVX512VL)
#define HAVE_vcvtps2ph (TARGET_F16C)

/* AVX2 / AVX-512 gather and scatter.  */
#define HAVE_avx2_gathersiv2di (TARGET_AVX2)
#define HAVE_avx2_gathersiv2df (TARGET_AVX2)
#define HAVE_avx2_gathersiv4di (TARGET_AVX2)
#define HAVE_avx2_gathersiv4df (TARGET_AVX2)
#define HAVE_avx2_gathersiv4si (TARGET_AVX2)
#define HAVE_avx2_gathersiv4sf (TARGET_AVX2)
#define HAVE_avx2_gathersiv8si (TARGET_AVX2)
#define HAVE_avx2_gathersiv8sf (TARGET_AVX2)
#define HAVE_avx2_gatherdiv2di (TARGET_AVX2)
#define HAVE_avx2_gatherdiv2df (TARGET_AVX2)
#define HAVE_avx2_gatherdiv4di (TARGET_AVX2)
#define HAVE_avx2_gatherdiv4df (TARGET_AVX2)
#define HAVE_avx2_gatherdiv4si (TARGET_AVX2)
#define HAVE_avx2_gatherdiv4sf (TARGET_AVX2)
#define HAVE_avx2_gatherdiv8si (TARGET_AVX2)
#define HAVE_avx2_gatherdiv8sf (TARGET_AVX2)
#define HAVE_avx512f_gathersiv16si (TARGET_AVX512F)
#define HAVE_avx512f_gathersiv16sf (TARGET_AVX512F)
#define HAVE_avx512f_gathersiv8di (TARGET_AVX512F)
#define HAVE_avx512f_gathersiv8df (TARGET_AVX512F)
#define HAVE_avx512vl_gathersiv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_gatherdiv16si (TARGET_AVX512F)
#define HAVE_avx512f_gatherdiv16sf (TARGET_AVX512F)
#define HAVE_avx512f_gatherdiv8di (TARGET_AVX512F)
#define HAVE_avx512f_gatherdiv8df (TARGET_AVX512F)
#define HAVE_avx512vl_gatherdiv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_scattersiv16si (TARGET_AVX512F)
#define HAVE_avx512f_scattersiv16sf (TARGET_AVX512F)
#define HAVE_avx512f_scattersiv8di (TARGET_AVX512F)
#define HAVE_avx512f_scattersiv8df (TARGET_AVX512F)
#define HAVE_avx512vl_scattersiv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_scatterdiv16si (TARGET_AVX512F)
#define HAVE_avx512f_scatterdiv16sf (TARGET_AVX512F)
#define HAVE_avx512f_scatterdiv8di (TARGET_AVX512F)
#define HAVE_avx512f_scatterdiv8df (TARGET_AVX512F)
#define HAVE_avx512vl_scatterdiv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))

/* Masked expand, IFMA multiply-add, and 4FMAPS/4VNNIW move modes.  */
#define HAVE_avx512f_expandv16si_maskz (TARGET_AVX512F)
#define HAVE_avx512f_expandv16sf_maskz (TARGET_AVX512F)
#define HAVE_avx512f_expandv8di_maskz (TARGET_AVX512F)
#define HAVE_avx512f_expandv8df_maskz (TARGET_AVX512F)
#define HAVE_avx512vl_expandv8si_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv8sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4di_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4si_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv2di_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv2df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vpamdd52huqv8di_maskz (TARGET_AVX512IFMA)
#define HAVE_vpamdd52huqv4di_maskz ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52huqv2di_maskz ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52luqv8di_maskz (TARGET_AVX512IFMA)
#define HAVE_vpamdd52luqv4di_maskz ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52luqv2di_maskz ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_movv64sf ((TARGET_AVX512F) && (TARGET_AVX5124FMAPS))
#define HAVE_movv64si ((TARGET_AVX512F) && (TARGET_AVX5124VNNIW))

/* Memory fences and atomic operations.  A condition of `1' means the
   pattern is unconditionally available.  */
#define HAVE_sse2_lfence (TARGET_SSE2)
#define HAVE_sse_sfence (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_sse2_mfence (TARGET_SSE2)
#define HAVE_mem_thread_fence 1
#define HAVE_atomic_loadqi 1
#define HAVE_atomic_loadhi 1
#define HAVE_atomic_loadsi 1
#define HAVE_atomic_loaddi (TARGET_64BIT || (TARGET_CMPXCHG8B && (TARGET_80387 || TARGET_SSE)))
#define HAVE_atomic_storeqi 1
#define HAVE_atomic_storehi 1
#define HAVE_atomic_storesi 1
#define HAVE_atomic_storedi (TARGET_64BIT || (TARGET_CMPXCHG8B && (TARGET_80387 || TARGET_SSE)))
#define HAVE_atomic_compare_and_swapqi (TARGET_CMPXCHG)
#define HAVE_atomic_compare_and_swaphi (TARGET_CMPXCHG)
#define HAVE_atomic_compare_and_swapsi (TARGET_CMPXCHG)
#define HAVE_atomic_compare_and_swapdi ((TARGET_CMPXCHG) && (TARGET_64BIT || TARGET_CMPXCHG8B))
#define HAVE_atomic_compare_and_swapti ((TARGET_CMPXCHG) && (TARGET_64BIT && TARGET_CMPXCHG16B))
#define HAVE_atomic_bit_test_and_sethi 1
#define HAVE_atomic_bit_test_and_setsi 1
#define HAVE_atomic_bit_test_and_setdi (TARGET_64BIT)
#define HAVE_atomic_bit_test_and_complementhi 1
#define HAVE_atomic_bit_test_and_complementsi 1
#define HAVE_atomic_bit_test_and_complementdi (TARGET_64BIT)
#define HAVE_atomic_bit_test_and_resethi 1
#define HAVE_atomic_bit_test_and_resetsi 1
#define HAVE_atomic_bit_test_and_resetdi (TARGET_64BIT)
6528extern rtx gen_x86_fnstsw_1 (rtx);
6529extern rtx gen_x86_sahf_1 (rtx);
6530extern rtx gen_insvhi_1 (rtx, rtx);
6531extern rtx gen_insvsi_1 (rtx, rtx);
6532extern rtx gen_insvdi_1 (rtx, rtx);
6533extern rtx gen_swapxf (rtx, rtx);
6534extern rtx gen_zero_extendqidi2 (rtx, rtx);
6535extern rtx gen_zero_extendhidi2 (rtx, rtx);
6536extern rtx gen_zero_extendqisi2_and (rtx, rtx);
6537extern rtx gen_zero_extendhisi2_and (rtx, rtx);
6538extern rtx gen_zero_extendqihi2_and (rtx, rtx);
6539extern rtx gen_extendsidi2_1 (rtx, rtx);
6540extern rtx gen_extendqidi2 (rtx, rtx);
6541extern rtx gen_extendhidi2 (rtx, rtx);
6542extern rtx gen_extendhisi2 (rtx, rtx);
6543extern rtx gen_extendqisi2 (rtx, rtx);
6544extern rtx gen_extendqihi2 (rtx, rtx);
6545extern rtx gen_truncxfsf2_i387_noop (rtx, rtx);
6546extern rtx gen_truncxfdf2_i387_noop (rtx, rtx);
6547extern rtx gen_fix_truncsfsi_sse (rtx, rtx);
6548extern rtx gen_fix_truncsfdi_sse (rtx, rtx);
6549extern rtx gen_fix_truncdfsi_sse (rtx, rtx);
6550extern rtx gen_fix_truncdfdi_sse (rtx, rtx);
6551extern rtx gen_fix_trunchi_fisttp_i387_1 (rtx, rtx);
6552extern rtx gen_fix_truncsi_fisttp_i387_1 (rtx, rtx);
6553extern rtx gen_fix_truncdi_fisttp_i387_1 (rtx, rtx);
6554extern rtx gen_fix_trunchi_i387_fisttp (rtx, rtx);
6555extern rtx gen_fix_truncsi_i387_fisttp (rtx, rtx);
6556extern rtx gen_fix_truncdi_i387_fisttp (rtx, rtx);
6557extern rtx gen_fix_trunchi_i387_fisttp_with_temp (rtx, rtx, rtx);
6558extern rtx gen_fix_truncsi_i387_fisttp_with_temp (rtx, rtx, rtx);
6559extern rtx gen_fix_truncdi_i387_fisttp_with_temp (rtx, rtx, rtx);
6560extern rtx gen_fix_truncdi_i387 (rtx, rtx, rtx, rtx);
6561extern rtx gen_fix_truncdi_i387_with_temp (rtx, rtx, rtx, rtx, rtx);
6562extern rtx gen_fix_trunchi_i387 (rtx, rtx, rtx, rtx);
6563extern rtx gen_fix_truncsi_i387 (rtx, rtx, rtx, rtx);
6564extern rtx gen_fix_trunchi_i387_with_temp (rtx, rtx, rtx, rtx, rtx);
6565extern rtx gen_fix_truncsi_i387_with_temp (rtx, rtx, rtx, rtx, rtx);
6566extern rtx gen_x86_fnstcw_1 (rtx);
6567extern rtx gen_x86_fldcw_1 (rtx);
6568extern rtx gen_floathisf2 (rtx, rtx);
6569extern rtx gen_floathidf2 (rtx, rtx);
6570extern rtx gen_floathixf2 (rtx, rtx);
6571extern rtx gen_floatsixf2 (rtx, rtx);
6572extern rtx gen_floatdixf2 (rtx, rtx);
6573extern rtx gen_floatdisf2_i387_with_xmm (rtx, rtx, rtx);
6574extern rtx gen_floatdidf2_i387_with_xmm (rtx, rtx, rtx);
6575extern rtx gen_floatdixf2_i387_with_xmm (rtx, rtx, rtx);
6576extern rtx gen_addsi_1_zext (rtx, rtx, rtx);
6577extern rtx gen_addqi_ext_1 (rtx, rtx, rtx);
6578extern rtx gen_addqi3_carry (rtx, rtx, rtx, rtx, rtx);
6579extern rtx gen_addhi3_carry (rtx, rtx, rtx, rtx, rtx);
6580extern rtx gen_addsi3_carry (rtx, rtx, rtx, rtx, rtx);
6581extern rtx gen_adddi3_carry (rtx, rtx, rtx, rtx, rtx);
6582extern rtx gen_addcarrysi (rtx, rtx, rtx, rtx, rtx);
6583extern rtx gen_addcarrydi (rtx, rtx, rtx, rtx, rtx);
6584extern rtx gen_subqi3_carry (rtx, rtx, rtx, rtx, rtx);
6585extern rtx gen_subhi3_carry (rtx, rtx, rtx, rtx, rtx);
6586extern rtx gen_subsi3_carry (rtx, rtx, rtx, rtx, rtx);
6587extern rtx gen_subdi3_carry (rtx, rtx, rtx, rtx, rtx);
6588extern rtx gen_subborrowsi (rtx, rtx, rtx, rtx, rtx);
6589extern rtx gen_subborrowdi (rtx, rtx, rtx, rtx, rtx);
6590extern rtx gen_divmodsi4_1 (rtx, rtx, rtx, rtx);
6591extern rtx gen_divmoddi4_1 (rtx, rtx, rtx, rtx);
6592extern rtx gen_divmodhiqi3 (rtx, rtx, rtx);
6593extern rtx gen_udivmodsi4_1 (rtx, rtx, rtx, rtx);
6594extern rtx gen_udivmoddi4_1 (rtx, rtx, rtx, rtx);
6595extern rtx gen_udivmodhiqi3 (rtx, rtx, rtx);
6596extern rtx gen_andqi_ext_1 (rtx, rtx, rtx);
6597extern rtx gen_copysignsf3_const (rtx, rtx, rtx, rtx);
6598extern rtx gen_copysigndf3_const (rtx, rtx, rtx, rtx);
6599extern rtx gen_copysigntf3_const (rtx, rtx, rtx, rtx);
6600extern rtx gen_copysignsf3_var (rtx, rtx, rtx, rtx, rtx, rtx);
6601extern rtx gen_copysigndf3_var (rtx, rtx, rtx, rtx, rtx, rtx);
6602extern rtx gen_copysigntf3_var (rtx, rtx, rtx, rtx, rtx, rtx);
6603extern rtx gen_x86_64_shld (rtx, rtx, rtx);
6604extern rtx gen_x86_shld (rtx, rtx, rtx);
6605extern rtx gen_x86_64_shrd (rtx, rtx, rtx);
6606extern rtx gen_x86_shrd (rtx, rtx, rtx);
6607extern rtx gen_ashrdi3_cvt (rtx, rtx, rtx);
6608extern rtx gen_ashrsi3_cvt (rtx, rtx, rtx);
6609extern rtx gen_ix86_rotldi3_doubleword (rtx, rtx, rtx);
6610extern rtx gen_ix86_rotlti3_doubleword (rtx, rtx, rtx);
6611extern rtx gen_ix86_rotrdi3_doubleword (rtx, rtx, rtx);
6612extern rtx gen_ix86_rotrti3_doubleword (rtx, rtx, rtx);
6613extern rtx gen_setcc_sf_sse (rtx, rtx, rtx, rtx);
6614extern rtx gen_setcc_df_sse (rtx, rtx, rtx, rtx);
6615extern rtx gen_jump (rtx);
6616extern rtx gen_blockage (void);
6617extern rtx gen_prologue_use (rtx);
6618extern rtx gen_simple_return_internal (void);
6619extern rtx gen_interrupt_return (void);
6620extern rtx gen_simple_return_internal_long (void);
6621extern rtx gen_simple_return_pop_internal (rtx);
6622extern rtx gen_simple_return_indirect_internal (rtx);
6623extern rtx gen_nop (void);
6624extern rtx gen_nops (rtx);
6625extern rtx gen_pad (rtx);
6626extern rtx gen_set_got_rex64 (rtx);
6627extern rtx gen_set_rip_rex64 (rtx, rtx);
6628extern rtx gen_set_got_offset_rex64 (rtx, rtx);
6629extern rtx gen_eh_return_internal (void);
6630extern rtx gen_leave (void);
6631extern rtx gen_leave_rex64 (void);
6632extern rtx gen_split_stack_return (rtx);
6633extern rtx gen_ffssi2_no_cmove (rtx, rtx);
6634extern rtx gen_ctzsi2 (rtx, rtx);
6635extern rtx gen_ctzdi2 (rtx, rtx);
6636extern rtx gen_bsr_rex64 (rtx, rtx);
6637extern rtx gen_bsr (rtx, rtx);
6638extern rtx gen_clzsi2_lzcnt (rtx, rtx);
6639extern rtx gen_clzdi2_lzcnt (rtx, rtx);
6640extern rtx gen_tzcnt_si (rtx, rtx);
6641extern rtx gen_lzcnt_si (rtx, rtx);
6642extern rtx gen_tzcnt_di (rtx, rtx);
6643extern rtx gen_lzcnt_di (rtx, rtx);
6644extern rtx gen_tzcnt_hi (rtx, rtx);
6645extern rtx gen_lzcnt_hi (rtx, rtx);
6646extern rtx gen_bmi_bextr_si (rtx, rtx, rtx);
6647extern rtx gen_bmi_bextr_di (rtx, rtx, rtx);
6648extern rtx gen_bmi2_pdep_si3 (rtx, rtx, rtx);
6649extern rtx gen_bmi2_pdep_di3 (rtx, rtx, rtx);
6650extern rtx gen_bmi2_pext_si3 (rtx, rtx, rtx);
6651extern rtx gen_bmi2_pext_di3 (rtx, rtx, rtx);
6652extern rtx gen_tbm_bextri_si (rtx, rtx, rtx, rtx);
6653extern rtx gen_tbm_bextri_di (rtx, rtx, rtx, rtx);
6654extern rtx gen_popcountsi2 (rtx, rtx);
6655extern rtx gen_popcountdi2 (rtx, rtx);
6656extern rtx gen_popcounthi2 (rtx, rtx);
6657extern rtx gen_bswaphi_lowpart (rtx);
6658extern rtx gen_paritydi2_cmp (rtx, rtx, rtx, rtx);
6659extern rtx gen_paritysi2_cmp (rtx, rtx, rtx);
/* Stub emitted by genflags: the tls_initial_exec_64_sun named pattern is
   not available in this configuration (presumably a Sun-assembler-specific
   TLS sequence not enabled for this target — confirm against i386.md), so
   callers get a dummy generator that produces no insn.  */
static inline rtx gen_tls_initial_exec_64_sun (rtx, rtx);
static inline rtx
gen_tls_initial_exec_64_sun(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
  /* No such pattern here; return no RTX.  */
  return 0;
}
6666extern rtx gen_truncxfsf2_i387_noop_unspec (rtx, rtx);
6667extern rtx gen_truncxfdf2_i387_noop_unspec (rtx, rtx);
6668extern rtx gen_sqrtxf2 (rtx, rtx);
6669extern rtx gen_sqrt_extendsfxf2_i387 (rtx, rtx);
6670extern rtx gen_sqrt_extenddfxf2_i387 (rtx, rtx);
6671extern rtx gen_fpremxf4_i387 (rtx, rtx, rtx, rtx);
6672extern rtx gen_fprem1xf4_i387 (rtx, rtx, rtx, rtx);
6673extern rtx gen_sincosxf3 (rtx, rtx, rtx);
6674extern rtx gen_sincos_extendsfxf3_i387 (rtx, rtx, rtx);
6675extern rtx gen_sincos_extenddfxf3_i387 (rtx, rtx, rtx);
6676extern rtx gen_fptanxf4_i387 (rtx, rtx, rtx, rtx);
6677extern rtx gen_fptan_extendsfxf4_i387 (rtx, rtx, rtx, rtx);
6678extern rtx gen_fptan_extenddfxf4_i387 (rtx, rtx, rtx, rtx);
6679extern rtx gen_fpatan_extendsfxf3_i387 (rtx, rtx, rtx);
6680extern rtx gen_fpatan_extenddfxf3_i387 (rtx, rtx, rtx);
6681extern rtx gen_fyl2xxf3_i387 (rtx, rtx, rtx);
6682extern rtx gen_fyl2x_extendsfxf3_i387 (rtx, rtx, rtx);
6683extern rtx gen_fyl2x_extenddfxf3_i387 (rtx, rtx, rtx);
6684extern rtx gen_fyl2xp1xf3_i387 (rtx, rtx, rtx);
6685extern rtx gen_fyl2xp1_extendsfxf3_i387 (rtx, rtx, rtx);
6686extern rtx gen_fyl2xp1_extenddfxf3_i387 (rtx, rtx, rtx);
6687extern rtx gen_fxtractxf3_i387 (rtx, rtx, rtx);
6688extern rtx gen_fxtract_extendsfxf3_i387 (rtx, rtx, rtx);
6689extern rtx gen_fxtract_extenddfxf3_i387 (rtx, rtx, rtx);
6690extern rtx gen_fscalexf4_i387 (rtx, rtx, rtx, rtx);
6691extern rtx gen_sse4_1_roundsf2 (rtx, rtx, rtx);
6692extern rtx gen_sse4_1_rounddf2 (rtx, rtx, rtx);
6693extern rtx gen_rintxf2 (rtx, rtx);
6694extern rtx gen_rintsf2_frndint (rtx, rtx);
6695extern rtx gen_rintdf2_frndint (rtx, rtx);
6696extern rtx gen_fistdi2 (rtx, rtx);
6697extern rtx gen_fistdi2_with_temp (rtx, rtx, rtx);
6698extern rtx gen_fisthi2 (rtx, rtx);
6699extern rtx gen_fistsi2 (rtx, rtx);
6700extern rtx gen_fisthi2_with_temp (rtx, rtx, rtx);
6701extern rtx gen_fistsi2_with_temp (rtx, rtx, rtx);
6702extern rtx gen_frndintsf2_floor (rtx, rtx);
6703extern rtx gen_frndintsf2_ceil (rtx, rtx);
6704extern rtx gen_frndintsf2_trunc (rtx, rtx);
6705extern rtx gen_frndintdf2_floor (rtx, rtx);
6706extern rtx gen_frndintdf2_ceil (rtx, rtx);
6707extern rtx gen_frndintdf2_trunc (rtx, rtx);
6708extern rtx gen_frndintxf2_floor (rtx, rtx);
6709extern rtx gen_frndintxf2_ceil (rtx, rtx);
6710extern rtx gen_frndintxf2_trunc (rtx, rtx);
6711extern rtx gen_frndintsf2_floor_i387 (rtx, rtx, rtx, rtx);
6712extern rtx gen_frndintsf2_ceil_i387 (rtx, rtx, rtx, rtx);
6713extern rtx gen_frndintsf2_trunc_i387 (rtx, rtx, rtx, rtx);
6714extern rtx gen_frndintdf2_floor_i387 (rtx, rtx, rtx, rtx);
6715extern rtx gen_frndintdf2_ceil_i387 (rtx, rtx, rtx, rtx);
6716extern rtx gen_frndintdf2_trunc_i387 (rtx, rtx, rtx, rtx);
6717extern rtx gen_frndintxf2_floor_i387 (rtx, rtx, rtx, rtx);
6718extern rtx gen_frndintxf2_ceil_i387 (rtx, rtx, rtx, rtx);
6719extern rtx gen_frndintxf2_trunc_i387 (rtx, rtx, rtx, rtx);
6720extern rtx gen_frndintxf2_mask_pm (rtx, rtx);
6721extern rtx gen_frndintxf2_mask_pm_i387 (rtx, rtx, rtx, rtx);
6722extern rtx gen_fistdi2_floor (rtx, rtx, rtx, rtx);
6723extern rtx gen_fistdi2_ceil (rtx, rtx, rtx, rtx);
6724extern rtx gen_fistdi2_floor_with_temp (rtx, rtx, rtx, rtx, rtx);
6725extern rtx gen_fistdi2_ceil_with_temp (rtx, rtx, rtx, rtx, rtx);
6726extern rtx gen_fisthi2_floor (rtx, rtx, rtx, rtx);
6727extern rtx gen_fisthi2_ceil (rtx, rtx, rtx, rtx);
6728extern rtx gen_fistsi2_floor (rtx, rtx, rtx, rtx);
6729extern rtx gen_fistsi2_ceil (rtx, rtx, rtx, rtx);
6730extern rtx gen_fisthi2_floor_with_temp (rtx, rtx, rtx, rtx, rtx);
6731extern rtx gen_fisthi2_ceil_with_temp (rtx, rtx, rtx, rtx, rtx);
6732extern rtx gen_fistsi2_floor_with_temp (rtx, rtx, rtx, rtx, rtx);
6733extern rtx gen_fistsi2_ceil_with_temp (rtx, rtx, rtx, rtx, rtx);
6734extern rtx gen_fxamsf2_i387 (rtx, rtx);
6735extern rtx gen_fxamdf2_i387 (rtx, rtx);
6736extern rtx gen_fxamxf2_i387 (rtx, rtx);
6737extern rtx gen_fxamsf2_i387_with_temp (rtx, rtx);
6738extern rtx gen_fxamdf2_i387_with_temp (rtx, rtx);
6739extern rtx gen_movmsk_df (rtx, rtx);
6740extern rtx gen_cld (void);
6741extern rtx gen_smaxsf3 (rtx, rtx, rtx);
6742extern rtx gen_sminsf3 (rtx, rtx, rtx);
6743extern rtx gen_smaxdf3 (rtx, rtx, rtx);
6744extern rtx gen_smindf3 (rtx, rtx, rtx);
6745extern rtx gen_pro_epilogue_adjust_stack_si_add (rtx, rtx, rtx);
6746extern rtx gen_pro_epilogue_adjust_stack_di_add (rtx, rtx, rtx);
6747extern rtx gen_pro_epilogue_adjust_stack_si_sub (rtx, rtx, rtx);
6748extern rtx gen_pro_epilogue_adjust_stack_di_sub (rtx, rtx, rtx);
6749extern rtx gen_allocate_stack_worker_probe_si (rtx, rtx);
6750extern rtx gen_allocate_stack_worker_probe_di (rtx, rtx);
6751extern rtx gen_probe_stack_si (rtx, rtx);
6752extern rtx gen_probe_stack_di (rtx, rtx);
6753extern rtx gen_adjust_stack_and_probesi (rtx, rtx, rtx);
6754extern rtx gen_adjust_stack_and_probedi (rtx, rtx, rtx);
6755extern rtx gen_probe_stack_rangesi (rtx, rtx, rtx);
6756extern rtx gen_probe_stack_rangedi (rtx, rtx, rtx);
6757extern rtx gen_trap (void);
6758extern rtx gen_stack_protect_set_si (rtx, rtx);
6759extern rtx gen_stack_protect_set_di (rtx, rtx);
6760extern rtx gen_stack_tls_protect_set_si (rtx, rtx);
6761extern rtx gen_stack_tls_protect_set_di (rtx, rtx);
6762extern rtx gen_stack_protect_test_si (rtx, rtx, rtx);
6763extern rtx gen_stack_protect_test_di (rtx, rtx, rtx);
6764extern rtx gen_stack_tls_protect_test_si (rtx, rtx, rtx);
6765extern rtx gen_stack_tls_protect_test_di (rtx, rtx, rtx);
6766extern rtx gen_sse4_2_crc32qi (rtx, rtx, rtx);
6767extern rtx gen_sse4_2_crc32hi (rtx, rtx, rtx);
6768extern rtx gen_sse4_2_crc32si (rtx, rtx, rtx);
6769extern rtx gen_sse4_2_crc32di (rtx, rtx, rtx);
6770extern rtx gen_rdpmc (rtx, rtx);
6771extern rtx gen_rdpmc_rex64 (rtx, rtx, rtx);
6772extern rtx gen_rdtsc (rtx);
6773extern rtx gen_rdtsc_rex64 (rtx, rtx);
6774extern rtx gen_rdtscp (rtx, rtx);
6775extern rtx gen_rdtscp_rex64 (rtx, rtx, rtx);
6776extern rtx gen_fxsave (rtx);
6777extern rtx gen_fxsave64 (rtx);
6778extern rtx gen_fxrstor (rtx);
6779extern rtx gen_fxrstor64 (rtx);
6780extern rtx gen_xsave (rtx, rtx);
6781extern rtx gen_xsaveopt (rtx, rtx);
6782extern rtx gen_xsavec (rtx, rtx);
6783extern rtx gen_xsaves (rtx, rtx);
6784extern rtx gen_xsave_rex64 (rtx, rtx, rtx);
6785extern rtx gen_xsaveopt_rex64 (rtx, rtx, rtx);
6786extern rtx gen_xsavec_rex64 (rtx, rtx, rtx);
6787extern rtx gen_xsaves_rex64 (rtx, rtx, rtx);
6788extern rtx gen_xsave64 (rtx, rtx, rtx);
6789extern rtx gen_xsaveopt64 (rtx, rtx, rtx);
6790extern rtx gen_xsavec64 (rtx, rtx, rtx);
6791extern rtx gen_xsaves64 (rtx, rtx, rtx);
6792extern rtx gen_xrstor (rtx, rtx);
6793extern rtx gen_xrstors (rtx, rtx);
6794extern rtx gen_xrstor_rex64 (rtx, rtx, rtx);
6795extern rtx gen_xrstors_rex64 (rtx, rtx, rtx);
6796extern rtx gen_xrstor64 (rtx, rtx, rtx);
6797extern rtx gen_xrstors64 (rtx, rtx, rtx);
6798extern rtx gen_fnstenv (rtx);
6799extern rtx gen_fldenv (rtx);
6800extern rtx gen_fnstsw (rtx);
6801extern rtx gen_fnclex (void);
6802extern rtx gen_lwp_slwpcbsi (rtx);
6803extern rtx gen_lwp_slwpcbdi (rtx);
6804extern rtx gen_rdfsbasesi (rtx);
6805extern rtx gen_rdgsbasesi (rtx);
6806extern rtx gen_rdfsbasedi (rtx);
6807extern rtx gen_rdgsbasedi (rtx);
6808extern rtx gen_wrfsbasesi (rtx);
6809extern rtx gen_wrgsbasesi (rtx);
6810extern rtx gen_wrfsbasedi (rtx);
6811extern rtx gen_wrgsbasedi (rtx);
6812extern rtx gen_rdrandhi_1 (rtx);
6813extern rtx gen_rdrandsi_1 (rtx);
6814extern rtx gen_rdranddi_1 (rtx);
6815extern rtx gen_rdseedhi_1 (rtx);
6816extern rtx gen_rdseedsi_1 (rtx);
6817extern rtx gen_rdseeddi_1 (rtx);
6818extern rtx gen_xbegin_1 (rtx, rtx);
6819extern rtx gen_xend (void);
6820extern rtx gen_xabort (rtx);
6821extern rtx gen_xtest_1 (void);
6822extern rtx gen_clwb (rtx);
6823extern rtx gen_clflushopt (rtx);
6824extern rtx gen_mwaitx (rtx, rtx, rtx);
6825extern rtx gen_monitorx_si (rtx, rtx, rtx);
6826extern rtx gen_monitorx_di (rtx, rtx, rtx);
6827extern rtx gen_clzero_si (rtx);
6828extern rtx gen_clzero_di (rtx);
6829extern rtx gen_move_size_reloc_si (rtx, rtx);
6830extern rtx gen_move_size_reloc_di (rtx, rtx);
6831extern rtx gen_rdpid (rtx);
6832extern rtx gen_sse_movntq (rtx, rtx);
6833extern rtx gen_mmx_ieee_maxv2sf3 (rtx, rtx, rtx);
6834extern rtx gen_mmx_ieee_minv2sf3 (rtx, rtx, rtx);
6835extern rtx gen_mmx_rcpv2sf2 (rtx, rtx);
6836extern rtx gen_mmx_rcpit1v2sf3 (rtx, rtx, rtx);
6837extern rtx gen_mmx_rcpit2v2sf3 (rtx, rtx, rtx);
6838extern rtx gen_mmx_rsqrtv2sf2 (rtx, rtx);
6839extern rtx gen_mmx_rsqit1v2sf3 (rtx, rtx, rtx);
6840extern rtx gen_mmx_haddv2sf3 (rtx, rtx, rtx);
6841extern rtx gen_mmx_hsubv2sf3 (rtx, rtx, rtx);
6842extern rtx gen_mmx_addsubv2sf3 (rtx, rtx, rtx);
6843extern rtx gen_mmx_gtv2sf3 (rtx, rtx, rtx);
6844extern rtx gen_mmx_gev2sf3 (rtx, rtx, rtx);
6845extern rtx gen_mmx_pf2id (rtx, rtx);
6846extern rtx gen_mmx_pf2iw (rtx, rtx);
6847extern rtx gen_mmx_pi2fw (rtx, rtx);
6848extern rtx gen_mmx_floatv2si2 (rtx, rtx);
6849extern rtx gen_mmx_pswapdv2sf2 (rtx, rtx);
6850extern rtx gen_mmx_ashrv4hi3 (rtx, rtx, rtx);
6851extern rtx gen_mmx_ashrv2si3 (rtx, rtx, rtx);
6852extern rtx gen_mmx_ashlv4hi3 (rtx, rtx, rtx);
6853extern rtx gen_mmx_lshrv4hi3 (rtx, rtx, rtx);
6854extern rtx gen_mmx_ashlv2si3 (rtx, rtx, rtx);
6855extern rtx gen_mmx_lshrv2si3 (rtx, rtx, rtx);
6856extern rtx gen_mmx_ashlv1di3 (rtx, rtx, rtx);
6857extern rtx gen_mmx_lshrv1di3 (rtx, rtx, rtx);
6858extern rtx gen_mmx_gtv8qi3 (rtx, rtx, rtx);
6859extern rtx gen_mmx_gtv4hi3 (rtx, rtx, rtx);
6860extern rtx gen_mmx_gtv2si3 (rtx, rtx, rtx);
6861extern rtx gen_mmx_andnotv8qi3 (rtx, rtx, rtx);
6862extern rtx gen_mmx_andnotv4hi3 (rtx, rtx, rtx);
6863extern rtx gen_mmx_andnotv2si3 (rtx, rtx, rtx);
6864extern rtx gen_mmx_packsswb (rtx, rtx, rtx);
6865extern rtx gen_mmx_packssdw (rtx, rtx, rtx);
6866extern rtx gen_mmx_packuswb (rtx, rtx, rtx);
6867extern rtx gen_mmx_punpckhbw (rtx, rtx, rtx);
6868extern rtx gen_mmx_punpcklbw (rtx, rtx, rtx);
6869extern rtx gen_mmx_punpckhwd (rtx, rtx, rtx);
6870extern rtx gen_mmx_punpcklwd (rtx, rtx, rtx);
6871extern rtx gen_mmx_punpckhdq (rtx, rtx, rtx);
6872extern rtx gen_mmx_punpckldq (rtx, rtx, rtx);
6873extern rtx gen_mmx_pextrw (rtx, rtx, rtx);
6874extern rtx gen_mmx_pshufw_1 (rtx, rtx, rtx, rtx, rtx, rtx);
6875extern rtx gen_mmx_pswapdv2si2 (rtx, rtx);
6876extern rtx gen_mmx_psadbw (rtx, rtx, rtx);
6877extern rtx gen_mmx_pmovmskb (rtx, rtx);
6878extern rtx gen_movv64qi_internal (rtx, rtx);
6879extern rtx gen_movv32qi_internal (rtx, rtx);
6880extern rtx gen_movv16qi_internal (rtx, rtx);
6881extern rtx gen_movv32hi_internal (rtx, rtx);
6882extern rtx gen_movv16hi_internal (rtx, rtx);
6883extern rtx gen_movv8hi_internal (rtx, rtx);
6884extern rtx gen_movv16si_internal (rtx, rtx);
6885extern rtx gen_movv8si_internal (rtx, rtx);
6886extern rtx gen_movv4si_internal (rtx, rtx);
6887extern rtx gen_movv8di_internal (rtx, rtx);
6888extern rtx gen_movv4di_internal (rtx, rtx);
6889extern rtx gen_movv2di_internal (rtx, rtx);
6890extern rtx gen_movv4ti_internal (rtx, rtx);
6891extern rtx gen_movv2ti_internal (rtx, rtx);
6892extern rtx gen_movv1ti_internal (rtx, rtx);
6893extern rtx gen_movv16sf_internal (rtx, rtx);
6894extern rtx gen_movv8sf_internal (rtx, rtx);
6895extern rtx gen_movv4sf_internal (rtx, rtx);
6896extern rtx gen_movv8df_internal (rtx, rtx);
6897extern rtx gen_movv4df_internal (rtx, rtx);
6898extern rtx gen_movv2df_internal (rtx, rtx);
6899extern rtx gen_avx512f_loadv16si_mask (rtx, rtx, rtx, rtx);
6900extern rtx gen_avx512vl_loadv8si_mask (rtx, rtx, rtx, rtx);
6901extern rtx gen_avx512vl_loadv4si_mask (rtx, rtx, rtx, rtx);
6902extern rtx gen_avx512f_loadv8di_mask (rtx, rtx, rtx, rtx);
6903extern rtx gen_avx512vl_loadv4di_mask (rtx, rtx, rtx, rtx);
6904extern rtx gen_avx512vl_loadv2di_mask (rtx, rtx, rtx, rtx);
6905extern rtx gen_avx512f_loadv16sf_mask (rtx, rtx, rtx, rtx);
6906extern rtx gen_avx512vl_loadv8sf_mask (rtx, rtx, rtx, rtx);
6907extern rtx gen_avx512vl_loadv4sf_mask (rtx, rtx, rtx, rtx);
6908extern rtx gen_avx512f_loadv8df_mask (rtx, rtx, rtx, rtx);
6909extern rtx gen_avx512vl_loadv4df_mask (rtx, rtx, rtx, rtx);
6910extern rtx gen_avx512vl_loadv2df_mask (rtx, rtx, rtx, rtx);
6911extern rtx gen_avx512bw_loadv64qi_mask (rtx, rtx, rtx, rtx);
6912extern rtx gen_avx512vl_loadv16qi_mask (rtx, rtx, rtx, rtx);
6913extern rtx gen_avx512vl_loadv32qi_mask (rtx, rtx, rtx, rtx);
6914extern rtx gen_avx512bw_loadv32hi_mask (rtx, rtx, rtx, rtx);
6915extern rtx gen_avx512vl_loadv16hi_mask (rtx, rtx, rtx, rtx);
6916extern rtx gen_avx512vl_loadv8hi_mask (rtx, rtx, rtx, rtx);
6917extern rtx gen_avx512f_blendmv16si (rtx, rtx, rtx, rtx);
6918extern rtx gen_avx512vl_blendmv8si (rtx, rtx, rtx, rtx);
6919extern rtx gen_avx512vl_blendmv4si (rtx, rtx, rtx, rtx);
6920extern rtx gen_avx512f_blendmv8di (rtx, rtx, rtx, rtx);
6921extern rtx gen_avx512vl_blendmv4di (rtx, rtx, rtx, rtx);
6922extern rtx gen_avx512vl_blendmv2di (rtx, rtx, rtx, rtx);
6923extern rtx gen_avx512f_blendmv16sf (rtx, rtx, rtx, rtx);
6924extern rtx gen_avx512vl_blendmv8sf (rtx, rtx, rtx, rtx);
6925extern rtx gen_avx512vl_blendmv4sf (rtx, rtx, rtx, rtx);
6926extern rtx gen_avx512f_blendmv8df (rtx, rtx, rtx, rtx);
6927extern rtx gen_avx512vl_blendmv4df (rtx, rtx, rtx, rtx);
6928extern rtx gen_avx512vl_blendmv2df (rtx, rtx, rtx, rtx);
6929extern rtx gen_avx512bw_blendmv64qi (rtx, rtx, rtx, rtx);
6930extern rtx gen_avx512vl_blendmv16qi (rtx, rtx, rtx, rtx);
6931extern rtx gen_avx512vl_blendmv32qi (rtx, rtx, rtx, rtx);
6932extern rtx gen_avx512bw_blendmv32hi (rtx, rtx, rtx, rtx);
6933extern rtx gen_avx512vl_blendmv16hi (rtx, rtx, rtx, rtx);
6934extern rtx gen_avx512vl_blendmv8hi (rtx, rtx, rtx, rtx);
6935extern rtx gen_avx512f_storev16si_mask (rtx, rtx, rtx);
6936extern rtx gen_avx512vl_storev8si_mask (rtx, rtx, rtx);
6937extern rtx gen_avx512vl_storev4si_mask (rtx, rtx, rtx);
6938extern rtx gen_avx512f_storev8di_mask (rtx, rtx, rtx);
6939extern rtx gen_avx512vl_storev4di_mask (rtx, rtx, rtx);
6940extern rtx gen_avx512vl_storev2di_mask (rtx, rtx, rtx);
6941extern rtx gen_avx512f_storev16sf_mask (rtx, rtx, rtx);
6942extern rtx gen_avx512vl_storev8sf_mask (rtx, rtx, rtx);
6943extern rtx gen_avx512vl_storev4sf_mask (rtx, rtx, rtx);
6944extern rtx gen_avx512f_storev8df_mask (rtx, rtx, rtx);
6945extern rtx gen_avx512vl_storev4df_mask (rtx, rtx, rtx);
6946extern rtx gen_avx512vl_storev2df_mask (rtx, rtx, rtx);
6947extern rtx gen_avx512bw_storev64qi_mask (rtx, rtx, rtx);
6948extern rtx gen_avx512vl_storev16qi_mask (rtx, rtx, rtx);
6949extern rtx gen_avx512vl_storev32qi_mask (rtx, rtx, rtx);
6950extern rtx gen_avx512bw_storev32hi_mask (rtx, rtx, rtx);
6951extern rtx gen_avx512vl_storev16hi_mask (rtx, rtx, rtx);
6952extern rtx gen_avx512vl_storev8hi_mask (rtx, rtx, rtx);
6953extern rtx gen_sse2_movq128 (rtx, rtx);
6954extern rtx gen_movdi_to_sse (rtx, rtx);
6955extern rtx gen_avx_lddqu256 (rtx, rtx);
6956extern rtx gen_sse3_lddqu (rtx, rtx);
6957extern rtx gen_sse2_movntisi (rtx, rtx);
6958extern rtx gen_sse2_movntidi (rtx, rtx);
6959extern rtx gen_avx512f_movntv16sf (rtx, rtx);
6960extern rtx gen_avx_movntv8sf (rtx, rtx);
6961extern rtx gen_sse_movntv4sf (rtx, rtx);
6962extern rtx gen_avx512f_movntv8df (rtx, rtx);
6963extern rtx gen_avx_movntv4df (rtx, rtx);
6964extern rtx gen_sse2_movntv2df (rtx, rtx);
6965extern rtx gen_avx512f_movntv8di (rtx, rtx);
6966extern rtx gen_avx_movntv4di (rtx, rtx);
6967extern rtx gen_sse2_movntv2di (rtx, rtx);
6968extern rtx gen_kandqi (rtx, rtx, rtx);
6969extern rtx gen_kiorqi (rtx, rtx, rtx);
6970extern rtx gen_kxorqi (rtx, rtx, rtx);
6971extern rtx gen_kandhi (rtx, rtx, rtx);
6972extern rtx gen_kiorhi (rtx, rtx, rtx);
6973extern rtx gen_kxorhi (rtx, rtx, rtx);
6974extern rtx gen_kandsi (rtx, rtx, rtx);
6975extern rtx gen_kiorsi (rtx, rtx, rtx);
6976extern rtx gen_kxorsi (rtx, rtx, rtx);
6977extern rtx gen_kanddi (rtx, rtx, rtx);
6978extern rtx gen_kiordi (rtx, rtx, rtx);
6979extern rtx gen_kxordi (rtx, rtx, rtx);
6980extern rtx gen_kandnqi (rtx, rtx, rtx);
6981extern rtx gen_kandnhi (rtx, rtx, rtx);
6982extern rtx gen_kandnsi (rtx, rtx, rtx);
6983extern rtx gen_kandndi (rtx, rtx, rtx);
6984extern rtx gen_kxnorqi (rtx, rtx, rtx);
6985extern rtx gen_kxnorhi (rtx, rtx, rtx);
6986extern rtx gen_kxnorsi (rtx, rtx, rtx);
6987extern rtx gen_kxnordi (rtx, rtx, rtx);
6988extern rtx gen_knotqi (rtx, rtx);
6989extern rtx gen_knothi (rtx, rtx);
6990extern rtx gen_knotsi (rtx, rtx);
6991extern rtx gen_knotdi (rtx, rtx);
6992extern rtx gen_kaddqi (rtx, rtx, rtx);
6993extern rtx gen_kaddhi (rtx, rtx, rtx);
6994extern rtx gen_kaddsi (rtx, rtx, rtx);
6995extern rtx gen_kadddi (rtx, rtx, rtx);
6996extern rtx gen_kashiftqi (rtx, rtx, rtx);
6997extern rtx gen_klshiftrtqi (rtx, rtx, rtx);
6998extern rtx gen_kashifthi (rtx, rtx, rtx);
6999extern rtx gen_klshiftrthi (rtx, rtx, rtx);
7000extern rtx gen_kashiftsi (rtx, rtx, rtx);
7001extern rtx gen_klshiftrtsi (rtx, rtx, rtx);
7002extern rtx gen_kashiftdi (rtx, rtx, rtx);
7003extern rtx gen_klshiftrtdi (rtx, rtx, rtx);
7004extern rtx gen_ktestqi (rtx, rtx);
7005extern rtx gen_ktesthi (rtx, rtx);
7006extern rtx gen_ktestsi (rtx, rtx);
7007extern rtx gen_ktestdi (rtx, rtx);
7008extern rtx gen_kortestqi (rtx, rtx);
7009extern rtx gen_kortesthi (rtx, rtx);
7010extern rtx gen_kortestsi (rtx, rtx);
7011extern rtx gen_kortestdi (rtx, rtx);
7012extern rtx gen_kunpckhi (rtx, rtx, rtx);
7013extern rtx gen_kunpcksi (rtx, rtx, rtx);
7014extern rtx gen_kunpckdi (rtx, rtx, rtx);
7015extern rtx gen_sse_vmaddv4sf3 (rtx, rtx, rtx);
7016extern rtx gen_sse_vmaddv4sf3_round (rtx, rtx, rtx, rtx);
7017extern rtx gen_sse_vmsubv4sf3 (rtx, rtx, rtx);
7018extern rtx gen_sse_vmsubv4sf3_round (rtx, rtx, rtx, rtx);
7019extern rtx gen_sse2_vmaddv2df3 (rtx, rtx, rtx);
7020extern rtx gen_sse2_vmaddv2df3_round (rtx, rtx, rtx, rtx);
7021extern rtx gen_sse2_vmsubv2df3 (rtx, rtx, rtx);
7022extern rtx gen_sse2_vmsubv2df3_round (rtx, rtx, rtx, rtx);
7023extern rtx gen_sse_vmmulv4sf3 (rtx, rtx, rtx);
7024extern rtx gen_sse_vmmulv4sf3_round (rtx, rtx, rtx, rtx);
7025extern rtx gen_sse_vmdivv4sf3 (rtx, rtx, rtx);
7026extern rtx gen_sse_vmdivv4sf3_round (rtx, rtx, rtx, rtx);
7027extern rtx gen_sse2_vmmulv2df3 (rtx, rtx, rtx);
7028extern rtx gen_sse2_vmmulv2df3_round (rtx, rtx, rtx, rtx);
7029extern rtx gen_sse2_vmdivv2df3 (rtx, rtx, rtx);
7030extern rtx gen_sse2_vmdivv2df3_round (rtx, rtx, rtx, rtx);
7031extern rtx gen_avx512f_divv16sf3 (rtx, rtx, rtx);
7032extern rtx gen_avx512f_divv16sf3_round (rtx, rtx, rtx, rtx);
7033extern rtx gen_avx512f_divv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
7034extern rtx gen_avx512f_divv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7035extern rtx gen_avx_divv8sf3 (rtx, rtx, rtx);
/* Stub emitted by genflags: no avx_divv8sf3_round pattern exists
   (NOTE(review): embedded-rounding forms appear to exist only for the
   512-bit AVX512 modes — see gen_avx512f_divv16sf3_round above; confirm
   against sse.md).  Returns no insn.  */
static inline rtx gen_avx_divv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_divv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7042extern rtx gen_avx_divv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
/* Stub emitted by genflags: no avx_divv8sf3_mask_round pattern exists for
   the 256-bit mode (the masked+rounding form is defined only for the
   512-bit variant above — confirm against sse.md).  Returns no insn.  */
static inline rtx gen_avx_divv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_divv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7049extern rtx gen_sse_divv4sf3 (rtx, rtx, rtx);
/* Stub emitted by genflags: no sse_divv4sf3_round pattern exists for the
   128-bit SSE mode (rounding variants are limited to the 512-bit forms —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_sse_divv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sse_divv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7056extern rtx gen_sse_divv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
/* Stub emitted by genflags: no sse_divv4sf3_mask_round pattern exists for
   the 128-bit mode (only the 512-bit masked+rounding form is defined —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_sse_divv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sse_divv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7063extern rtx gen_avx512f_divv8df3 (rtx, rtx, rtx);
7064extern rtx gen_avx512f_divv8df3_round (rtx, rtx, rtx, rtx);
7065extern rtx gen_avx512f_divv8df3_mask (rtx, rtx, rtx, rtx, rtx);
7066extern rtx gen_avx512f_divv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7067extern rtx gen_avx_divv4df3 (rtx, rtx, rtx);
/* Stub emitted by genflags: no avx_divv4df3_round pattern exists for the
   256-bit DF mode (rounding variants are limited to the 512-bit forms —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_avx_divv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_divv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7074extern rtx gen_avx_divv4df3_mask (rtx, rtx, rtx, rtx, rtx);
/* Stub emitted by genflags: no avx_divv4df3_mask_round pattern exists for
   the 256-bit mode (only the 512-bit masked+rounding form is defined —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_avx_divv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_divv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7081extern rtx gen_sse2_divv2df3 (rtx, rtx, rtx);
/* Stub emitted by genflags: no sse2_divv2df3_round pattern exists for the
   128-bit DF mode (rounding variants are limited to the 512-bit forms —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_sse2_divv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sse2_divv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7088extern rtx gen_sse2_divv2df3_mask (rtx, rtx, rtx, rtx, rtx);
/* Stub emitted by genflags: no sse2_divv2df3_mask_round pattern exists for
   the 128-bit mode (only the 512-bit masked+rounding form is defined —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_sse2_divv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sse2_divv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7095extern rtx gen_avx_rcpv8sf2 (rtx, rtx);
7096extern rtx gen_sse_rcpv4sf2 (rtx, rtx);
7097extern rtx gen_sse_vmrcpv4sf2 (rtx, rtx, rtx);
7098extern rtx gen_rcp14v16sf_mask (rtx, rtx, rtx, rtx);
7099extern rtx gen_rcp14v8sf_mask (rtx, rtx, rtx, rtx);
7100extern rtx gen_rcp14v4sf_mask (rtx, rtx, rtx, rtx);
7101extern rtx gen_rcp14v8df_mask (rtx, rtx, rtx, rtx);
7102extern rtx gen_rcp14v4df_mask (rtx, rtx, rtx, rtx);
7103extern rtx gen_rcp14v2df_mask (rtx, rtx, rtx, rtx);
7104extern rtx gen_srcp14v4sf (rtx, rtx, rtx);
7105extern rtx gen_srcp14v2df (rtx, rtx, rtx);
7106extern rtx gen_avx512f_sqrtv16sf2 (rtx, rtx);
7107extern rtx gen_avx512f_sqrtv16sf2_round (rtx, rtx, rtx);
7108extern rtx gen_avx512f_sqrtv16sf2_mask (rtx, rtx, rtx, rtx);
7109extern rtx gen_avx512f_sqrtv16sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
7110extern rtx gen_avx_sqrtv8sf2 (rtx, rtx);
/* Stub emitted by genflags: no avx_sqrtv8sf2_round pattern exists
   (rounding variants appear only for the 512-bit sqrt forms above —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_avx_sqrtv8sf2_round (rtx, rtx, rtx);
static inline rtx
gen_avx_sqrtv8sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7117extern rtx gen_avx_sqrtv8sf2_mask (rtx, rtx, rtx, rtx);
/* Stub emitted by genflags: no avx_sqrtv8sf2_mask_round pattern exists for
   the 256-bit mode (only the 512-bit masked+rounding sqrt is defined —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_avx_sqrtv8sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_sqrtv8sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7124extern rtx gen_sse_sqrtv4sf2 (rtx, rtx);
/* Stub emitted by genflags: no sse_sqrtv4sf2_round pattern exists for the
   128-bit mode (rounding variants are limited to the 512-bit forms —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_sse_sqrtv4sf2_round (rtx, rtx, rtx);
static inline rtx
gen_sse_sqrtv4sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7131extern rtx gen_sse_sqrtv4sf2_mask (rtx, rtx, rtx, rtx);
/* Stub emitted by genflags: no sse_sqrtv4sf2_mask_round pattern exists for
   the 128-bit mode (only the 512-bit masked+rounding sqrt is defined —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_sse_sqrtv4sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sse_sqrtv4sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7138extern rtx gen_avx512f_sqrtv8df2 (rtx, rtx);
7139extern rtx gen_avx512f_sqrtv8df2_round (rtx, rtx, rtx);
7140extern rtx gen_avx512f_sqrtv8df2_mask (rtx, rtx, rtx, rtx);
7141extern rtx gen_avx512f_sqrtv8df2_mask_round (rtx, rtx, rtx, rtx, rtx);
7142extern rtx gen_avx_sqrtv4df2 (rtx, rtx);
/* Stub emitted by genflags: no avx_sqrtv4df2_round pattern exists for the
   256-bit DF mode (rounding variants are limited to the 512-bit forms —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_avx_sqrtv4df2_round (rtx, rtx, rtx);
static inline rtx
gen_avx_sqrtv4df2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7149extern rtx gen_avx_sqrtv4df2_mask (rtx, rtx, rtx, rtx);
/* Stub emitted by genflags: no avx_sqrtv4df2_mask_round pattern exists for
   the 256-bit mode (only the 512-bit masked+rounding sqrt is defined —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_avx_sqrtv4df2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_sqrtv4df2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7156extern rtx gen_sse2_sqrtv2df2 (rtx, rtx);
/* Stub emitted by genflags: no sse2_sqrtv2df2_round pattern exists for the
   128-bit DF mode (rounding variants are limited to the 512-bit forms —
   confirm against sse.md).  Returns no insn.  */
static inline rtx gen_sse2_sqrtv2df2_round (rtx, rtx, rtx);
static inline rtx
gen_sse2_sqrtv2df2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  /* Pattern unavailable; produce no RTX.  */
  return 0;
}
7163extern rtx gen_sse2_sqrtv2df2_mask (rtx, rtx, rtx, rtx);
7164static inline rtx gen_sse2_sqrtv2df2_mask_round (rtx, rtx, rtx, rtx, rtx);
7165static inline rtx
7166gen_sse2_sqrtv2df2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
7167{
7168 return 0;
7169}
/* Scalar (vm*) sqrt, reciprocal-sqrt, and 512-bit IEEE min/max
   expanders -- all real (declared `extern', defined in the generated
   emitter file).  */
extern rtx gen_sse_vmsqrtv4sf2 (rtx, rtx, rtx);
extern rtx gen_sse_vmsqrtv4sf2_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmsqrtv2df2 (rtx, rtx, rtx);
extern rtx gen_sse2_vmsqrtv2df2_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx_rsqrtv8sf2 (rtx, rtx);
extern rtx gen_sse_rsqrtv4sf2 (rtx, rtx);
extern rtx gen_rsqrt14v16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v4sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v2df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v4sf (rtx, rtx, rtx);
extern rtx gen_rsqrt14v2df (rtx, rtx, rtx);
extern rtx gen_sse_vmrsqrtv4sf2 (rtx, rtx, rtx);
extern rtx gen_ieee_maxv16sf3 (rtx, rtx, rtx);
extern rtx gen_ieee_maxv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv16sf3 (rtx, rtx, rtx);
extern rtx gen_ieee_minv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* IEEE min/max for v8sf and v4sf: the base and `_mask' expanders are
   real; the `_round' and `_mask_round' variants are generated no-op
   stubs (always return 0 / emit nothing) for these sub-512-bit modes.  */
extern rtx gen_ieee_maxv8sf3 (rtx, rtx, rtx);
static inline rtx gen_ieee_maxv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_ieee_maxv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_maxv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_ieee_minv8sf3 (rtx, rtx, rtx);
static inline rtx gen_ieee_minv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_ieee_minv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_minv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_ieee_maxv4sf3 (rtx, rtx, rtx);
static inline rtx gen_ieee_maxv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_ieee_maxv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_maxv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_ieee_minv4sf3 (rtx, rtx, rtx);
static inline rtx gen_ieee_minv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_ieee_minv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_minv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
/* IEEE min/max for DF vectors: all v8df (512-bit) variants are real
   expanders; for v4df and v2df the `_round'/`_mask_round' variants are
   generated no-op stubs that return 0 (emit nothing).  */
extern rtx gen_ieee_maxv8df3 (rtx, rtx, rtx);
extern rtx gen_ieee_maxv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv8df3 (rtx, rtx, rtx);
extern rtx gen_ieee_minv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv4df3 (rtx, rtx, rtx);
static inline rtx gen_ieee_maxv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_ieee_maxv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_maxv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_ieee_minv4df3 (rtx, rtx, rtx);
static inline rtx gen_ieee_minv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_ieee_minv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_minv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_ieee_maxv2df3 (rtx, rtx, rtx);
static inline rtx gen_ieee_maxv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_ieee_maxv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_maxv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_ieee_minv2df3 (rtx, rtx, rtx);
static inline rtx gen_ieee_minv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_ieee_minv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_minv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
/* Scalar min/max, addsub, horizontal add/sub, reduce, AVX compare and
   mask-compare expanders, plus the 512-bit integer compares -- all real
   (`extern') expanders emitted elsewhere.  */
extern rtx gen_sse_vmsmaxv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse_vmsmaxv4sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse_vmsminv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse_vmsminv4sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmsmaxv2df3 (rtx, rtx, rtx);
extern rtx gen_sse2_vmsmaxv2df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmsminv2df3 (rtx, rtx, rtx);
extern rtx gen_sse2_vmsminv2df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx_addsubv4df3 (rtx, rtx, rtx);
extern rtx gen_sse3_addsubv2df3 (rtx, rtx, rtx);
extern rtx gen_avx_addsubv8sf3 (rtx, rtx, rtx);
extern rtx gen_sse3_addsubv4sf3 (rtx, rtx, rtx);
extern rtx gen_avx_haddv4df3 (rtx, rtx, rtx);
extern rtx gen_avx_hsubv4df3 (rtx, rtx, rtx);
extern rtx gen_sse3_hsubv2df3 (rtx, rtx, rtx);
extern rtx gen_avx_haddv8sf3 (rtx, rtx, rtx);
extern rtx gen_avx_hsubv8sf3 (rtx, rtx, rtx);
extern rtx gen_sse3_haddv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse3_hsubv4sf3 (rtx, rtx, rtx);
extern rtx gen_reducepv16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducepv8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducepv4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducepv8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducepv4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducepv2df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducesv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_reducesv2df (rtx, rtx, rtx, rtx);
extern rtx gen_avx_cmpv8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_cmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_cmpv4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_cmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vmcmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vmcmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_maskcmpv8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_sse_maskcmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_maskcmpv4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_maskcmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_sse_vmmaskcmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmmaskcmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16si3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16si3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* AVX512VL integer compares (v8si/v4si and v4di/v2di): base and `_mask'
   forms are real expanders; the `_round'/`_mask_round' forms are
   generated no-op stubs returning 0.  The 512-bit v8di forms are all
   real.  */
extern rtx gen_avx512vl_cmpv8si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8si3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv8si3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv8si3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
static inline rtx gen_avx512vl_cmpv8si3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv8si3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512vl_cmpv4si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4si3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv4si3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4si3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
static inline rtx gen_avx512vl_cmpv4si3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4si3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512f_cmpv8di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8di3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8di3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4di3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv4di3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4di3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
static inline rtx gen_avx512vl_cmpv4di3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4di3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512vl_cmpv2di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv2di3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv2di3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv2di3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
static inline rtx gen_avx512vl_cmpv2di3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv2di3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
/* AVX512 float compares: all 512-bit (v16sf/v8df) variants are real;
   for the AVX512VL 256/128-bit modes, `_round'/`_mask_round' variants
   are generated no-op stubs returning 0.  */
extern rtx gen_avx512f_cmpv16sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16sf3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv8sf3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
static inline rtx gen_avx512vl_cmpv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512vl_cmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv4sf3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
static inline rtx gen_avx512vl_cmpv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512f_cmpv8df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8df3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv4df3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
static inline rtx gen_avx512vl_cmpv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512vl_cmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv2df3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
static inline rtx gen_avx512vl_cmpv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
/* AVX512BW/VL byte and word compares, unsigned compares, scalar vmcmp,
   mask-compare, (u)comi, and andnot expanders -- all real (`extern')
   expanders.  */
extern rtx gen_avx512bw_cmpv64qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_cmpv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv16qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv32qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_cmpv32hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_cmpv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv16hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ucmpv64qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ucmpv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv16qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv32qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ucmpv32hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ucmpv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv16hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv8hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ucmpv16si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ucmpv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv8si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv4si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ucmpv8di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ucmpv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv4di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv2di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv4sf3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv2df3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv16sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv8df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_sse_comi (rtx, rtx);
extern rtx gen_sse_comi_round (rtx, rtx, rtx);
extern rtx gen_sse2_comi (rtx, rtx);
extern rtx gen_sse2_comi_round (rtx, rtx, rtx);
extern rtx gen_sse_ucomi (rtx, rtx);
extern rtx gen_sse_ucomi_round (rtx, rtx, rtx);
extern rtx gen_sse2_ucomi (rtx, rtx);
extern rtx gen_sse2_ucomi_round (rtx, rtx, rtx);
extern rtx gen_avx_andnotv8sf3 (rtx, rtx, rtx);
extern rtx gen_avx_andnotv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse_andnotv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse_andnotv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_andnotv4df3 (rtx, rtx, rtx);
extern rtx gen_avx_andnotv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_andnotv2df3 (rtx, rtx, rtx);
extern rtx gen_sse2_andnotv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv16sf3 (rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv8df3 (rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv8df3_mask (rtx, rtx, rtx, rtx, rtx);
/* FMA fmadd expanders, maskz and mask forms: 512-bit (v16sf/v8df)
   `_round' variants are real; the 256/128-bit `_round' variants are
   generated no-op stubs returning 0.  */
extern rtx gen_fma_fmadd_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmadd_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmadd_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmadd_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmadd_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
extern rtx gen_fma_fmadd_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmadd_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmadd_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
extern rtx gen_fma_fmadd_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmadd_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmadd_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmadd_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmadd_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
extern rtx gen_fma_fmadd_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmadd_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmadd_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
extern rtx gen_avx512f_fmadd_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmadd_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmadd_v8sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512vl_fmadd_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmadd_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmadd_v4sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512f_fmadd_v8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4df_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmadd_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmadd_v4df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512vl_fmadd_v2df_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmadd_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmadd_v2df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
/* fmadd mask3 forms are all real expanders (note: unlike the `_mask'
   group above, the VL `_mask3_round' forms here are `extern', not
   stubs).  The fmsub maskz section then follows the usual pattern:
   512-bit `_round' real, 256/128-bit `_round' stubbed to return 0.  */
extern rtx gen_avx512f_fmadd_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsub_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsub_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsub_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsub_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsub_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
extern rtx gen_fma_fmsub_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsub_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsub_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
extern rtx gen_fma_fmsub_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsub_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsub_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsub_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsub_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
extern rtx gen_fma_fmsub_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsub_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsub_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
/* fmsub `_mask' forms: all real here (including the VL `_mask_round'
   variants).  fmsub `_mask3' forms follow with the VL `_mask3_round'
   variants stubbed to return 0.  */
extern rtx gen_avx512f_fmsub_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v2df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmsub_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmsub_v8sf_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512vl_fmsub_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmsub_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmsub_v4sf_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512f_fmsub_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmsub_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmsub_v4df_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_avx512vl_fmsub_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmsub_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmsub_v2df_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7702extern rtx gen_fma_fnmadd_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7703extern rtx gen_fma_fnmadd_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7704extern rtx gen_fma_fnmadd_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7705static inline rtx gen_fma_fnmadd_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7706static inline rtx
7707gen_fma_fnmadd_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
7708{
7709 return 0;
7710}
7711extern rtx gen_fma_fnmadd_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7712static inline rtx gen_fma_fnmadd_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7713static inline rtx
7714gen_fma_fnmadd_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
7715{
7716 return 0;
7717}
7718extern rtx gen_fma_fnmadd_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7719extern rtx gen_fma_fnmadd_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7720extern rtx gen_fma_fnmadd_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7721static inline rtx gen_fma_fnmadd_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7722static inline rtx
7723gen_fma_fnmadd_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
7724{
7725 return 0;
7726}
7727extern rtx gen_fma_fnmadd_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7728static inline rtx gen_fma_fnmadd_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7729static inline rtx
7730gen_fma_fnmadd_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
7731{
7732 return 0;
7733}
7734extern rtx gen_avx512f_fnmadd_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
7735extern rtx gen_avx512f_fnmadd_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7736extern rtx gen_avx512vl_fnmadd_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
7737static inline rtx gen_avx512vl_fnmadd_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7738static inline rtx
7739gen_avx512vl_fnmadd_v8sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
7740{
7741 return 0;
7742}
7743extern rtx gen_avx512vl_fnmadd_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
7744static inline rtx gen_avx512vl_fnmadd_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmadd_v4sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7750extern rtx gen_avx512f_fnmadd_v8df_mask (rtx, rtx, rtx, rtx, rtx);
7751extern rtx gen_avx512f_fnmadd_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7752extern rtx gen_avx512vl_fnmadd_v4df_mask (rtx, rtx, rtx, rtx, rtx);
7753static inline rtx gen_avx512vl_fnmadd_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmadd_v4df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7759extern rtx gen_avx512vl_fnmadd_v2df_mask (rtx, rtx, rtx, rtx, rtx);
7760static inline rtx gen_avx512vl_fnmadd_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmadd_v2df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7766extern rtx gen_avx512f_fnmadd_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7767extern rtx gen_avx512f_fnmadd_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7768extern rtx gen_avx512vl_fnmadd_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7769static inline rtx gen_avx512vl_fnmadd_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmadd_v8sf_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7775extern rtx gen_avx512vl_fnmadd_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7776static inline rtx gen_avx512vl_fnmadd_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmadd_v4sf_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7782extern rtx gen_avx512f_fnmadd_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
7783extern rtx gen_avx512f_fnmadd_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7784extern rtx gen_avx512vl_fnmadd_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
7785static inline rtx gen_avx512vl_fnmadd_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmadd_v4df_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7791extern rtx gen_avx512vl_fnmadd_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
7792static inline rtx gen_avx512vl_fnmadd_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmadd_v2df_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7798extern rtx gen_fma_fnmsub_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7799extern rtx gen_fma_fnmsub_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7800extern rtx gen_fma_fnmsub_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7801static inline rtx gen_fma_fnmsub_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fnmsub_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7807extern rtx gen_fma_fnmsub_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7808static inline rtx gen_fma_fnmsub_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fnmsub_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7814extern rtx gen_fma_fnmsub_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7815extern rtx gen_fma_fnmsub_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7816extern rtx gen_fma_fnmsub_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7817static inline rtx gen_fma_fnmsub_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fnmsub_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7823extern rtx gen_fma_fnmsub_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7824static inline rtx gen_fma_fnmsub_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fnmsub_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7830extern rtx gen_avx512f_fnmsub_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
7831extern rtx gen_avx512f_fnmsub_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7832extern rtx gen_avx512vl_fnmsub_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
7833static inline rtx gen_avx512vl_fnmsub_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmsub_v8sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7839extern rtx gen_avx512vl_fnmsub_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
7840static inline rtx gen_avx512vl_fnmsub_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmsub_v4sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7846extern rtx gen_avx512f_fnmsub_v8df_mask (rtx, rtx, rtx, rtx, rtx);
7847extern rtx gen_avx512f_fnmsub_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7848extern rtx gen_avx512vl_fnmsub_v4df_mask (rtx, rtx, rtx, rtx, rtx);
7849static inline rtx gen_avx512vl_fnmsub_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmsub_v4df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7855extern rtx gen_avx512vl_fnmsub_v2df_mask (rtx, rtx, rtx, rtx, rtx);
7856static inline rtx gen_avx512vl_fnmsub_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512vl_fnmsub_v2df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
7862extern rtx gen_avx512f_fnmsub_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7863extern rtx gen_avx512f_fnmsub_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7864extern rtx gen_avx512vl_fnmsub_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7865extern rtx gen_avx512vl_fnmsub_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7866extern rtx gen_avx512vl_fnmsub_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7867extern rtx gen_avx512vl_fnmsub_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7868extern rtx gen_avx512f_fnmsub_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
7869extern rtx gen_avx512f_fnmsub_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7870extern rtx gen_avx512vl_fnmsub_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
7871extern rtx gen_avx512vl_fnmsub_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7872extern rtx gen_avx512vl_fnmsub_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
7873extern rtx gen_avx512vl_fnmsub_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7874extern rtx gen_fma_fmaddsub_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7875extern rtx gen_fma_fmaddsub_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7876extern rtx gen_fma_fmaddsub_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7877static inline rtx gen_fma_fmaddsub_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fmaddsub_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7883extern rtx gen_fma_fmaddsub_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7884static inline rtx gen_fma_fmaddsub_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fmaddsub_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7890extern rtx gen_fma_fmaddsub_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7891extern rtx gen_fma_fmaddsub_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7892extern rtx gen_fma_fmaddsub_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7893static inline rtx gen_fma_fmaddsub_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fmaddsub_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7899extern rtx gen_fma_fmaddsub_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7900static inline rtx gen_fma_fmaddsub_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fmaddsub_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7906extern rtx gen_avx512f_fmaddsub_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
7907extern rtx gen_avx512f_fmaddsub_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7908extern rtx gen_avx512vl_fmaddsub_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
7909extern rtx gen_avx512vl_fmaddsub_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7910extern rtx gen_avx512vl_fmaddsub_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
7911extern rtx gen_avx512vl_fmaddsub_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7912extern rtx gen_avx512f_fmaddsub_v8df_mask (rtx, rtx, rtx, rtx, rtx);
7913extern rtx gen_avx512f_fmaddsub_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7914extern rtx gen_avx512vl_fmaddsub_v4df_mask (rtx, rtx, rtx, rtx, rtx);
7915extern rtx gen_avx512vl_fmaddsub_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7916extern rtx gen_avx512vl_fmaddsub_v2df_mask (rtx, rtx, rtx, rtx, rtx);
7917extern rtx gen_avx512vl_fmaddsub_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7918extern rtx gen_avx512f_fmaddsub_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7919extern rtx gen_avx512f_fmaddsub_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7920extern rtx gen_avx512vl_fmaddsub_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7921extern rtx gen_avx512vl_fmaddsub_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7922extern rtx gen_avx512vl_fmaddsub_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7923extern rtx gen_avx512vl_fmaddsub_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7924extern rtx gen_avx512f_fmaddsub_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
7925extern rtx gen_avx512f_fmaddsub_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7926extern rtx gen_avx512vl_fmaddsub_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
7927extern rtx gen_avx512vl_fmaddsub_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7928extern rtx gen_avx512vl_fmaddsub_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
7929extern rtx gen_avx512vl_fmaddsub_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7930extern rtx gen_fma_fmsubadd_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7931extern rtx gen_fma_fmsubadd_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7932extern rtx gen_fma_fmsubadd_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7933static inline rtx gen_fma_fmsubadd_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fmsubadd_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7939extern rtx gen_fma_fmsubadd_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7940static inline rtx gen_fma_fmsubadd_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fmsubadd_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7946extern rtx gen_fma_fmsubadd_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7947extern rtx gen_fma_fmsubadd_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
7948extern rtx gen_fma_fmsubadd_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7949static inline rtx gen_fma_fmsubadd_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fmsubadd_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7955extern rtx gen_fma_fmsubadd_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
7956static inline rtx gen_fma_fmsubadd_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fma_fmsubadd_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
7962extern rtx gen_avx512f_fmsubadd_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
7963extern rtx gen_avx512f_fmsubadd_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7964extern rtx gen_avx512vl_fmsubadd_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
7965extern rtx gen_avx512vl_fmsubadd_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7966extern rtx gen_avx512vl_fmsubadd_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
7967extern rtx gen_avx512vl_fmsubadd_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7968extern rtx gen_avx512f_fmsubadd_v8df_mask (rtx, rtx, rtx, rtx, rtx);
7969extern rtx gen_avx512f_fmsubadd_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7970extern rtx gen_avx512vl_fmsubadd_v4df_mask (rtx, rtx, rtx, rtx, rtx);
7971extern rtx gen_avx512vl_fmsubadd_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7972extern rtx gen_avx512vl_fmsubadd_v2df_mask (rtx, rtx, rtx, rtx, rtx);
7973extern rtx gen_avx512vl_fmsubadd_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
7974extern rtx gen_avx512f_fmsubadd_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7975extern rtx gen_avx512f_fmsubadd_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7976extern rtx gen_avx512vl_fmsubadd_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7977extern rtx gen_avx512vl_fmsubadd_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7978extern rtx gen_avx512vl_fmsubadd_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
7979extern rtx gen_avx512vl_fmsubadd_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7980extern rtx gen_avx512f_fmsubadd_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
7981extern rtx gen_avx512f_fmsubadd_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7982extern rtx gen_avx512vl_fmsubadd_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
7983extern rtx gen_avx512vl_fmsubadd_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7984extern rtx gen_avx512vl_fmsubadd_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
7985extern rtx gen_avx512vl_fmsubadd_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
7986extern rtx gen_sse_cvtpi2ps (rtx, rtx, rtx);
7987extern rtx gen_sse_cvtps2pi (rtx, rtx);
7988extern rtx gen_sse_cvttps2pi (rtx, rtx);
7989extern rtx gen_sse_cvtsi2ss (rtx, rtx, rtx);
7990extern rtx gen_sse_cvtsi2ss_round (rtx, rtx, rtx, rtx);
7991extern rtx gen_sse_cvtsi2ssq (rtx, rtx, rtx);
7992extern rtx gen_sse_cvtsi2ssq_round (rtx, rtx, rtx, rtx);
7993extern rtx gen_sse_cvtss2si (rtx, rtx);
7994extern rtx gen_sse_cvtss2si_round (rtx, rtx, rtx);
7995extern rtx gen_sse_cvtss2si_2 (rtx, rtx);
7996extern rtx gen_sse_cvtss2siq (rtx, rtx);
7997extern rtx gen_sse_cvtss2siq_round (rtx, rtx, rtx);
7998extern rtx gen_sse_cvtss2siq_2 (rtx, rtx);
7999extern rtx gen_sse_cvttss2si (rtx, rtx);
8000extern rtx gen_sse_cvttss2si_round (rtx, rtx, rtx);
8001extern rtx gen_sse_cvttss2siq (rtx, rtx);
8002extern rtx gen_sse_cvttss2siq_round (rtx, rtx, rtx);
8003extern rtx gen_cvtusi2ss32 (rtx, rtx, rtx);
8004extern rtx gen_cvtusi2ss32_round (rtx, rtx, rtx, rtx);
8005extern rtx gen_cvtusi2sd32 (rtx, rtx, rtx);
8006static inline rtx gen_cvtusi2sd32_round (rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_cvtusi2sd32_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
8012extern rtx gen_cvtusi2ss64 (rtx, rtx, rtx);
8013extern rtx gen_cvtusi2ss64_round (rtx, rtx, rtx, rtx);
8014extern rtx gen_cvtusi2sd64 (rtx, rtx, rtx);
8015extern rtx gen_cvtusi2sd64_round (rtx, rtx, rtx, rtx);
8016extern rtx gen_floatv16siv16sf2 (rtx, rtx);
8017extern rtx gen_floatv16siv16sf2_round (rtx, rtx, rtx);
8018extern rtx gen_floatv16siv16sf2_mask (rtx, rtx, rtx, rtx);
8019extern rtx gen_floatv16siv16sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
8020extern rtx gen_floatv8siv8sf2 (rtx, rtx);
8021static inline rtx gen_floatv8siv8sf2_round (rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_floatv8siv8sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8027extern rtx gen_floatv8siv8sf2_mask (rtx, rtx, rtx, rtx);
8028static inline rtx gen_floatv8siv8sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_floatv8siv8sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8034extern rtx gen_floatv4siv4sf2 (rtx, rtx);
8035static inline rtx gen_floatv4siv4sf2_round (rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_floatv4siv4sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8041extern rtx gen_floatv4siv4sf2_mask (rtx, rtx, rtx, rtx);
8042static inline rtx gen_floatv4siv4sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_floatv4siv4sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8048extern rtx gen_ufloatv16siv16sf2 (rtx, rtx);
8049extern rtx gen_ufloatv16siv16sf2_round (rtx, rtx, rtx);
8050extern rtx gen_ufloatv16siv16sf2_mask (rtx, rtx, rtx, rtx);
8051extern rtx gen_ufloatv16siv16sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
8052extern rtx gen_ufloatv8siv8sf2 (rtx, rtx);
8053extern rtx gen_ufloatv8siv8sf2_round (rtx, rtx, rtx);
8054extern rtx gen_ufloatv8siv8sf2_mask (rtx, rtx, rtx, rtx);
8055extern rtx gen_ufloatv8siv8sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
8056extern rtx gen_ufloatv4siv4sf2 (rtx, rtx);
8057extern rtx gen_ufloatv4siv4sf2_round (rtx, rtx, rtx);
8058extern rtx gen_ufloatv4siv4sf2_mask (rtx, rtx, rtx, rtx);
8059extern rtx gen_ufloatv4siv4sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
8060extern rtx gen_avx_fix_notruncv8sfv8si (rtx, rtx);
8061extern rtx gen_avx_fix_notruncv8sfv8si_mask (rtx, rtx, rtx, rtx);
8062extern rtx gen_sse2_fix_notruncv4sfv4si (rtx, rtx);
8063extern rtx gen_sse2_fix_notruncv4sfv4si_mask (rtx, rtx, rtx, rtx);
8064extern rtx gen_avx512f_fix_notruncv16sfv16si (rtx, rtx);
8065extern rtx gen_avx512f_fix_notruncv16sfv16si_round (rtx, rtx, rtx);
8066extern rtx gen_avx512f_fix_notruncv16sfv16si_mask (rtx, rtx, rtx, rtx);
8067extern rtx gen_avx512f_fix_notruncv16sfv16si_mask_round (rtx, rtx, rtx, rtx, rtx);
8068extern rtx gen_avx512f_ufix_notruncv16sfv16si_mask (rtx, rtx, rtx, rtx);
8069extern rtx gen_avx512f_ufix_notruncv16sfv16si_mask_round (rtx, rtx, rtx, rtx, rtx);
8070extern rtx gen_avx512vl_ufix_notruncv8sfv8si_mask (rtx, rtx, rtx, rtx);
8071extern rtx gen_avx512vl_ufix_notruncv8sfv8si_mask_round (rtx, rtx, rtx, rtx, rtx);
8072extern rtx gen_avx512vl_ufix_notruncv4sfv4si_mask (rtx, rtx, rtx, rtx);
8073extern rtx gen_avx512vl_ufix_notruncv4sfv4si_mask_round (rtx, rtx, rtx, rtx, rtx);
8074extern rtx gen_avx512dq_cvtps2qqv8di_mask (rtx, rtx, rtx, rtx);
8075extern rtx gen_avx512dq_cvtps2qqv8di_mask_round (rtx, rtx, rtx, rtx, rtx);
8076extern rtx gen_avx512dq_cvtps2qqv4di_mask (rtx, rtx, rtx, rtx);
8077static inline rtx gen_avx512dq_cvtps2qqv4di_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512dq_cvtps2qqv4di_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8083extern rtx gen_avx512dq_cvtps2qqv2di_mask (rtx, rtx, rtx, rtx);
8084extern rtx gen_avx512dq_cvtps2uqqv8di_mask (rtx, rtx, rtx, rtx);
8085extern rtx gen_avx512dq_cvtps2uqqv8di_mask_round (rtx, rtx, rtx, rtx, rtx);
8086extern rtx gen_avx512dq_cvtps2uqqv4di_mask (rtx, rtx, rtx, rtx);
8087static inline rtx gen_avx512dq_cvtps2uqqv4di_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_avx512dq_cvtps2uqqv4di_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8093extern rtx gen_avx512dq_cvtps2uqqv2di_mask (rtx, rtx, rtx, rtx);
8094extern rtx gen_fix_truncv16sfv16si2 (rtx, rtx);
8095extern rtx gen_fix_truncv16sfv16si2_round (rtx, rtx, rtx);
8096extern rtx gen_fix_truncv16sfv16si2_mask (rtx, rtx, rtx, rtx);
8097extern rtx gen_fix_truncv16sfv16si2_mask_round (rtx, rtx, rtx, rtx, rtx);
8098extern rtx gen_ufix_truncv16sfv16si2 (rtx, rtx);
8099extern rtx gen_ufix_truncv16sfv16si2_round (rtx, rtx, rtx);
8100extern rtx gen_ufix_truncv16sfv16si2_mask (rtx, rtx, rtx, rtx);
8101extern rtx gen_ufix_truncv16sfv16si2_mask_round (rtx, rtx, rtx, rtx, rtx);
8102extern rtx gen_fix_truncv8sfv8si2 (rtx, rtx);
8103extern rtx gen_fix_truncv8sfv8si2_mask (rtx, rtx, rtx, rtx);
8104extern rtx gen_fix_truncv4sfv4si2 (rtx, rtx);
8105extern rtx gen_fix_truncv4sfv4si2_mask (rtx, rtx, rtx, rtx);
8106extern rtx gen_sse2_cvtpi2pd (rtx, rtx);
8107extern rtx gen_sse2_cvtpd2pi (rtx, rtx);
8108extern rtx gen_sse2_cvttpd2pi (rtx, rtx);
8109extern rtx gen_sse2_cvtsi2sd (rtx, rtx, rtx);
8110extern rtx gen_sse2_cvtsi2sdq (rtx, rtx, rtx);
8111extern rtx gen_sse2_cvtsi2sdq_round (rtx, rtx, rtx, rtx);
8112extern rtx gen_avx512f_vcvtss2usi (rtx, rtx);
8113extern rtx gen_avx512f_vcvtss2usi_round (rtx, rtx, rtx);
8114extern rtx gen_avx512f_vcvtss2usiq (rtx, rtx);
8115extern rtx gen_avx512f_vcvtss2usiq_round (rtx, rtx, rtx);
8116extern rtx gen_avx512f_vcvttss2usi (rtx, rtx);
8117extern rtx gen_avx512f_vcvttss2usi_round (rtx, rtx, rtx);
8118extern rtx gen_avx512f_vcvttss2usiq (rtx, rtx);
8119extern rtx gen_avx512f_vcvttss2usiq_round (rtx, rtx, rtx);
8120extern rtx gen_avx512f_vcvtsd2usi (rtx, rtx);
8121extern rtx gen_avx512f_vcvtsd2usi_round (rtx, rtx, rtx);
8122extern rtx gen_avx512f_vcvtsd2usiq (rtx, rtx);
8123extern rtx gen_avx512f_vcvtsd2usiq_round (rtx, rtx, rtx);
8124extern rtx gen_avx512f_vcvttsd2usi (rtx, rtx);
8125extern rtx gen_avx512f_vcvttsd2usi_round (rtx, rtx, rtx);
8126extern rtx gen_avx512f_vcvttsd2usiq (rtx, rtx);
8127extern rtx gen_avx512f_vcvttsd2usiq_round (rtx, rtx, rtx);
8128extern rtx gen_sse2_cvtsd2si (rtx, rtx);
8129extern rtx gen_sse2_cvtsd2si_round (rtx, rtx, rtx);
8130extern rtx gen_sse2_cvtsd2si_2 (rtx, rtx);
8131extern rtx gen_sse2_cvtsd2siq (rtx, rtx);
8132extern rtx gen_sse2_cvtsd2siq_round (rtx, rtx, rtx);
8133extern rtx gen_sse2_cvtsd2siq_2 (rtx, rtx);
8134extern rtx gen_sse2_cvttsd2si (rtx, rtx);
8135extern rtx gen_sse2_cvttsd2si_round (rtx, rtx, rtx);
8136extern rtx gen_sse2_cvttsd2siq (rtx, rtx);
8137extern rtx gen_sse2_cvttsd2siq_round (rtx, rtx, rtx);
8138extern rtx gen_floatv8siv8df2 (rtx, rtx);
8139extern rtx gen_floatv8siv8df2_mask (rtx, rtx, rtx, rtx);
8140extern rtx gen_floatv4siv4df2 (rtx, rtx);
8141extern rtx gen_floatv4siv4df2_mask (rtx, rtx, rtx, rtx);
8142extern rtx gen_floatv8div8df2 (rtx, rtx);
8143extern rtx gen_floatv8div8df2_round (rtx, rtx, rtx);
8144extern rtx gen_floatv8div8df2_mask (rtx, rtx, rtx, rtx);
8145extern rtx gen_floatv8div8df2_mask_round (rtx, rtx, rtx, rtx, rtx);
8146extern rtx gen_ufloatv8div8df2 (rtx, rtx);
8147extern rtx gen_ufloatv8div8df2_round (rtx, rtx, rtx);
8148extern rtx gen_ufloatv8div8df2_mask (rtx, rtx, rtx, rtx);
8149extern rtx gen_ufloatv8div8df2_mask_round (rtx, rtx, rtx, rtx, rtx);
8150extern rtx gen_floatv4div4df2 (rtx, rtx);
8151extern rtx gen_floatv4div4df2_round (rtx, rtx, rtx);
8152extern rtx gen_floatv4div4df2_mask (rtx, rtx, rtx, rtx);
8153extern rtx gen_floatv4div4df2_mask_round (rtx, rtx, rtx, rtx, rtx);
8154extern rtx gen_ufloatv4div4df2 (rtx, rtx);
8155extern rtx gen_ufloatv4div4df2_round (rtx, rtx, rtx);
8156extern rtx gen_ufloatv4div4df2_mask (rtx, rtx, rtx, rtx);
8157extern rtx gen_ufloatv4div4df2_mask_round (rtx, rtx, rtx, rtx, rtx);
8158extern rtx gen_floatv2div2df2 (rtx, rtx);
8159extern rtx gen_floatv2div2df2_round (rtx, rtx, rtx);
8160extern rtx gen_floatv2div2df2_mask (rtx, rtx, rtx, rtx);
8161extern rtx gen_floatv2div2df2_mask_round (rtx, rtx, rtx, rtx, rtx);
8162extern rtx gen_ufloatv2div2df2 (rtx, rtx);
8163extern rtx gen_ufloatv2div2df2_round (rtx, rtx, rtx);
8164extern rtx gen_ufloatv2div2df2_mask (rtx, rtx, rtx, rtx);
8165extern rtx gen_ufloatv2div2df2_mask_round (rtx, rtx, rtx, rtx, rtx);
8166extern rtx gen_floatv8div8sf2 (rtx, rtx);
8167extern rtx gen_floatv8div8sf2_round (rtx, rtx, rtx);
8168extern rtx gen_floatv8div8sf2_mask (rtx, rtx, rtx, rtx);
8169extern rtx gen_floatv8div8sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
8170extern rtx gen_ufloatv8div8sf2 (rtx, rtx);
8171extern rtx gen_ufloatv8div8sf2_round (rtx, rtx, rtx);
8172extern rtx gen_ufloatv8div8sf2_mask (rtx, rtx, rtx, rtx);
8173extern rtx gen_ufloatv8div8sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
8174extern rtx gen_floatv4div4sf2 (rtx, rtx);
8175static inline rtx gen_floatv4div4sf2_round (rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_floatv4div4sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8181extern rtx gen_floatv4div4sf2_mask (rtx, rtx, rtx, rtx);
8182static inline rtx gen_floatv4div4sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_floatv4div4sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8188extern rtx gen_ufloatv4div4sf2 (rtx, rtx);
8189static inline rtx gen_ufloatv4div4sf2_round (rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_ufloatv4div4sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8195extern rtx gen_ufloatv4div4sf2_mask (rtx, rtx, rtx, rtx);
8196static inline rtx gen_ufloatv4div4sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_ufloatv4div4sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8202extern rtx gen_floatv2div2sf2_mask (rtx, rtx, rtx, rtx);
8203extern rtx gen_ufloatv2div2sf2_mask (rtx, rtx, rtx, rtx);
8204extern rtx gen_ufloatv8siv8df2 (rtx, rtx);
8205extern rtx gen_ufloatv8siv8df2_mask (rtx, rtx, rtx, rtx);
8206extern rtx gen_ufloatv4siv4df2 (rtx, rtx);
8207extern rtx gen_ufloatv4siv4df2_mask (rtx, rtx, rtx, rtx);
8208extern rtx gen_ufloatv2siv2df2 (rtx, rtx);
8209extern rtx gen_ufloatv2siv2df2_mask (rtx, rtx, rtx, rtx);
8210extern rtx gen_avx512f_cvtdq2pd512_2 (rtx, rtx);
8211extern rtx gen_avx_cvtdq2pd256_2 (rtx, rtx);
8212extern rtx gen_sse2_cvtdq2pd (rtx, rtx);
8213extern rtx gen_sse2_cvtdq2pd_mask (rtx, rtx, rtx, rtx);
8214extern rtx gen_avx512f_cvtpd2dq512 (rtx, rtx);
8215extern rtx gen_avx512f_cvtpd2dq512_round (rtx, rtx, rtx);
8216extern rtx gen_avx512f_cvtpd2dq512_mask (rtx, rtx, rtx, rtx);
8217extern rtx gen_avx512f_cvtpd2dq512_mask_round (rtx, rtx, rtx, rtx, rtx);
8218extern rtx gen_avx_cvtpd2dq256 (rtx, rtx);
8219extern rtx gen_avx_cvtpd2dq256_mask (rtx, rtx, rtx, rtx);
8220extern rtx gen_sse2_cvtpd2dq (rtx, rtx);
8221extern rtx gen_sse2_cvtpd2dq_mask (rtx, rtx, rtx, rtx);
8222extern rtx gen_ufix_notruncv8dfv8si2 (rtx, rtx);
8223extern rtx gen_ufix_notruncv8dfv8si2_round (rtx, rtx, rtx);
8224extern rtx gen_ufix_notruncv8dfv8si2_mask (rtx, rtx, rtx, rtx);
8225extern rtx gen_ufix_notruncv8dfv8si2_mask_round (rtx, rtx, rtx, rtx, rtx);
8226extern rtx gen_ufix_notruncv4dfv4si2 (rtx, rtx);
8227extern rtx gen_ufix_notruncv4dfv4si2_round (rtx, rtx, rtx);
8228extern rtx gen_ufix_notruncv4dfv4si2_mask (rtx, rtx, rtx, rtx);
8229extern rtx gen_ufix_notruncv4dfv4si2_mask_round (rtx, rtx, rtx, rtx, rtx);
8230extern rtx gen_ufix_notruncv2dfv2si2 (rtx, rtx);
8231extern rtx gen_ufix_notruncv2dfv2si2_mask (rtx, rtx, rtx, rtx);
8232extern rtx gen_fix_truncv8dfv8si2 (rtx, rtx);
8233extern rtx gen_fix_truncv8dfv8si2_round (rtx, rtx, rtx);
8234extern rtx gen_fix_truncv8dfv8si2_mask (rtx, rtx, rtx, rtx);
8235extern rtx gen_fix_truncv8dfv8si2_mask_round (rtx, rtx, rtx, rtx, rtx);
8236extern rtx gen_ufix_truncv8dfv8si2 (rtx, rtx);
8237extern rtx gen_ufix_truncv8dfv8si2_round (rtx, rtx, rtx);
8238extern rtx gen_ufix_truncv8dfv8si2_mask (rtx, rtx, rtx, rtx);
8239extern rtx gen_ufix_truncv8dfv8si2_mask_round (rtx, rtx, rtx, rtx, rtx);
8240extern rtx gen_ufix_truncv2dfv2si2 (rtx, rtx);
8241extern rtx gen_ufix_truncv2dfv2si2_mask (rtx, rtx, rtx, rtx);
8242extern rtx gen_fix_truncv4dfv4si2 (rtx, rtx);
8243extern rtx gen_fix_truncv4dfv4si2_mask (rtx, rtx, rtx, rtx);
8244extern rtx gen_ufix_truncv4dfv4si2 (rtx, rtx);
8245extern rtx gen_ufix_truncv4dfv4si2_mask (rtx, rtx, rtx, rtx);
8246extern rtx gen_fix_truncv8dfv8di2 (rtx, rtx);
8247extern rtx gen_fix_truncv8dfv8di2_round (rtx, rtx, rtx);
8248extern rtx gen_fix_truncv8dfv8di2_mask (rtx, rtx, rtx, rtx);
8249extern rtx gen_fix_truncv8dfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
8250extern rtx gen_ufix_truncv8dfv8di2 (rtx, rtx);
8251extern rtx gen_ufix_truncv8dfv8di2_round (rtx, rtx, rtx);
8252extern rtx gen_ufix_truncv8dfv8di2_mask (rtx, rtx, rtx, rtx);
8253extern rtx gen_ufix_truncv8dfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
8254extern rtx gen_fix_truncv4dfv4di2 (rtx, rtx);
8255static inline rtx gen_fix_truncv4dfv4di2_round (rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fix_truncv4dfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8261extern rtx gen_fix_truncv4dfv4di2_mask (rtx, rtx, rtx, rtx);
8262static inline rtx gen_fix_truncv4dfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fix_truncv4dfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8268extern rtx gen_ufix_truncv4dfv4di2 (rtx, rtx);
8269static inline rtx gen_ufix_truncv4dfv4di2_round (rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_ufix_truncv4dfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8275extern rtx gen_ufix_truncv4dfv4di2_mask (rtx, rtx, rtx, rtx);
8276static inline rtx gen_ufix_truncv4dfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_ufix_truncv4dfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8282extern rtx gen_fix_truncv2dfv2di2 (rtx, rtx);
8283static inline rtx gen_fix_truncv2dfv2di2_round (rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fix_truncv2dfv2di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8289extern rtx gen_fix_truncv2dfv2di2_mask (rtx, rtx, rtx, rtx);
8290static inline rtx gen_fix_truncv2dfv2di2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_fix_truncv2dfv2di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8296extern rtx gen_ufix_truncv2dfv2di2 (rtx, rtx);
8297static inline rtx gen_ufix_truncv2dfv2di2_round (rtx, rtx, rtx);
/* Generated stub: this expander is not implemented in this configuration
   and always returns a null rtx (no insn emitted).  */
static inline rtx
gen_ufix_truncv2dfv2di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8303extern rtx gen_ufix_truncv2dfv2di2_mask (rtx, rtx, rtx, rtx);
8304static inline rtx gen_ufix_truncv2dfv2di2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Stub for the `ufix_truncv2dfv2di2_mask_round' pattern: not
   instantiated for this configuration; all five operands are ignored
   and 0 is returned.  NOTE(review): 0 (null rtx) presumably signals
   "pattern unavailable" -- generator convention, confirm.
   Auto-generated file: do not edit by hand.  */
static inline rtx
gen_ufix_truncv2dfv2di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8310extern rtx gen_fix_notruncv8dfv8di2 (rtx, rtx);
8311extern rtx gen_fix_notruncv8dfv8di2_round (rtx, rtx, rtx);
8312extern rtx gen_fix_notruncv8dfv8di2_mask (rtx, rtx, rtx, rtx);
8313extern rtx gen_fix_notruncv8dfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
8314extern rtx gen_fix_notruncv4dfv4di2 (rtx, rtx);
8315static inline rtx gen_fix_notruncv4dfv4di2_round (rtx, rtx, rtx);
/* Stub for the `fix_notruncv4dfv4di2_round' pattern: not instantiated
   for this configuration; operands are ignored and 0 is returned.
   NOTE(review): 0 (null rtx) presumably signals "pattern unavailable"
   -- generator convention, confirm.  Auto-generated file: do not edit
   by hand.  */
static inline rtx
gen_fix_notruncv4dfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8321extern rtx gen_fix_notruncv4dfv4di2_mask (rtx, rtx, rtx, rtx);
8322static inline rtx gen_fix_notruncv4dfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Stub for the `fix_notruncv4dfv4di2_mask_round' pattern: not
   instantiated for this configuration; all five operands are ignored
   and 0 is returned.  NOTE(review): 0 (null rtx) presumably signals
   "pattern unavailable" -- generator convention, confirm.
   Auto-generated file: do not edit by hand.  */
static inline rtx
gen_fix_notruncv4dfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8328extern rtx gen_fix_notruncv2dfv2di2 (rtx, rtx);
8329static inline rtx gen_fix_notruncv2dfv2di2_round (rtx, rtx, rtx);
/* Stub for the `fix_notruncv2dfv2di2_round' pattern: not instantiated
   for this configuration; operands are ignored and 0 is returned.
   NOTE(review): 0 (null rtx) presumably signals "pattern unavailable"
   -- generator convention, confirm.  Auto-generated file: do not edit
   by hand.  */
static inline rtx
gen_fix_notruncv2dfv2di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8335extern rtx gen_fix_notruncv2dfv2di2_mask (rtx, rtx, rtx, rtx);
8336static inline rtx gen_fix_notruncv2dfv2di2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Stub for the `fix_notruncv2dfv2di2_mask_round' pattern: not
   instantiated for this configuration; all five operands are ignored
   and 0 is returned.  NOTE(review): 0 (null rtx) presumably signals
   "pattern unavailable" -- generator convention, confirm.
   Auto-generated file: do not edit by hand.  */
static inline rtx
gen_fix_notruncv2dfv2di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8342extern rtx gen_ufix_notruncv8dfv8di2 (rtx, rtx);
8343extern rtx gen_ufix_notruncv8dfv8di2_round (rtx, rtx, rtx);
8344extern rtx gen_ufix_notruncv8dfv8di2_mask (rtx, rtx, rtx, rtx);
8345extern rtx gen_ufix_notruncv8dfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
8346extern rtx gen_ufix_notruncv4dfv4di2 (rtx, rtx);
8347static inline rtx gen_ufix_notruncv4dfv4di2_round (rtx, rtx, rtx);
/* Stub for the `ufix_notruncv4dfv4di2_round' pattern: not instantiated
   for this configuration; operands are ignored and 0 is returned.
   NOTE(review): 0 (null rtx) presumably signals "pattern unavailable"
   -- generator convention, confirm.  Auto-generated file: do not edit
   by hand.  */
static inline rtx
gen_ufix_notruncv4dfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8353extern rtx gen_ufix_notruncv4dfv4di2_mask (rtx, rtx, rtx, rtx);
8354static inline rtx gen_ufix_notruncv4dfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Stub for the `ufix_notruncv4dfv4di2_mask_round' pattern: not
   instantiated for this configuration; all five operands are ignored
   and 0 is returned.  NOTE(review): 0 (null rtx) presumably signals
   "pattern unavailable" -- generator convention, confirm.
   Auto-generated file: do not edit by hand.  */
static inline rtx
gen_ufix_notruncv4dfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8360extern rtx gen_ufix_notruncv2dfv2di2 (rtx, rtx);
8361static inline rtx gen_ufix_notruncv2dfv2di2_round (rtx, rtx, rtx);
/* Stub for the `ufix_notruncv2dfv2di2_round' pattern: not instantiated
   for this configuration; operands are ignored and 0 is returned.
   NOTE(review): 0 (null rtx) presumably signals "pattern unavailable"
   -- generator convention, confirm.  Auto-generated file: do not edit
   by hand.  */
static inline rtx
gen_ufix_notruncv2dfv2di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8367extern rtx gen_ufix_notruncv2dfv2di2_mask (rtx, rtx, rtx, rtx);
8368static inline rtx gen_ufix_notruncv2dfv2di2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Stub for the `ufix_notruncv2dfv2di2_mask_round' pattern: not
   instantiated for this configuration; all five operands are ignored
   and 0 is returned.  NOTE(review): 0 (null rtx) presumably signals
   "pattern unavailable" -- generator convention, confirm.
   Auto-generated file: do not edit by hand.  */
static inline rtx
gen_ufix_notruncv2dfv2di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8374extern rtx gen_fix_truncv8sfv8di2 (rtx, rtx);
8375extern rtx gen_fix_truncv8sfv8di2_round (rtx, rtx, rtx);
8376extern rtx gen_fix_truncv8sfv8di2_mask (rtx, rtx, rtx, rtx);
8377extern rtx gen_fix_truncv8sfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
8378extern rtx gen_ufix_truncv8sfv8di2 (rtx, rtx);
8379extern rtx gen_ufix_truncv8sfv8di2_round (rtx, rtx, rtx);
8380extern rtx gen_ufix_truncv8sfv8di2_mask (rtx, rtx, rtx, rtx);
8381extern rtx gen_ufix_truncv8sfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
8382extern rtx gen_fix_truncv4sfv4di2 (rtx, rtx);
8383static inline rtx gen_fix_truncv4sfv4di2_round (rtx, rtx, rtx);
/* Stub for the `fix_truncv4sfv4di2_round' pattern: not instantiated
   for this configuration; operands are ignored and 0 is returned.
   NOTE(review): 0 (null rtx) presumably signals "pattern unavailable"
   -- generator convention, confirm.  Auto-generated file: do not edit
   by hand.  */
static inline rtx
gen_fix_truncv4sfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8389extern rtx gen_fix_truncv4sfv4di2_mask (rtx, rtx, rtx, rtx);
8390static inline rtx gen_fix_truncv4sfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Stub for the `fix_truncv4sfv4di2_mask_round' pattern: not
   instantiated for this configuration; all five operands are ignored
   and 0 is returned.  NOTE(review): 0 (null rtx) presumably signals
   "pattern unavailable" -- generator convention, confirm.
   Auto-generated file: do not edit by hand.  */
static inline rtx
gen_fix_truncv4sfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8396extern rtx gen_ufix_truncv4sfv4di2 (rtx, rtx);
8397static inline rtx gen_ufix_truncv4sfv4di2_round (rtx, rtx, rtx);
/* Stub for the `ufix_truncv4sfv4di2_round' pattern: not instantiated
   for this configuration; operands are ignored and 0 is returned.
   NOTE(review): 0 (null rtx) presumably signals "pattern unavailable"
   -- generator convention, confirm.  Auto-generated file: do not edit
   by hand.  */
static inline rtx
gen_ufix_truncv4sfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8403extern rtx gen_ufix_truncv4sfv4di2_mask (rtx, rtx, rtx, rtx);
8404static inline rtx gen_ufix_truncv4sfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Stub for the `ufix_truncv4sfv4di2_mask_round' pattern: not
   instantiated for this configuration; all five operands are ignored
   and 0 is returned.  NOTE(review): 0 (null rtx) presumably signals
   "pattern unavailable" -- generator convention, confirm.
   Auto-generated file: do not edit by hand.  */
static inline rtx
gen_ufix_truncv4sfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8410extern rtx gen_fix_truncv2sfv2di2 (rtx, rtx);
8411extern rtx gen_fix_truncv2sfv2di2_mask (rtx, rtx, rtx, rtx);
8412extern rtx gen_ufix_truncv2sfv2di2 (rtx, rtx);
8413extern rtx gen_ufix_truncv2sfv2di2_mask (rtx, rtx, rtx, rtx);
8414extern rtx gen_ufix_truncv8sfv8si2 (rtx, rtx);
8415extern rtx gen_ufix_truncv8sfv8si2_mask (rtx, rtx, rtx, rtx);
8416extern rtx gen_ufix_truncv4sfv4si2 (rtx, rtx);
8417extern rtx gen_ufix_truncv4sfv4si2_mask (rtx, rtx, rtx, rtx);
8418extern rtx gen_sse2_cvttpd2dq (rtx, rtx);
8419extern rtx gen_sse2_cvttpd2dq_mask (rtx, rtx, rtx, rtx);
8420extern rtx gen_sse2_cvtsd2ss (rtx, rtx, rtx);
8421extern rtx gen_sse2_cvtsd2ss_round (rtx, rtx, rtx, rtx);
8422extern rtx gen_sse2_cvtss2sd (rtx, rtx, rtx);
8423extern rtx gen_sse2_cvtss2sd_round (rtx, rtx, rtx, rtx);
8424extern rtx gen_avx512f_cvtpd2ps512_mask (rtx, rtx, rtx, rtx);
8425extern rtx gen_avx512f_cvtpd2ps512_mask_round (rtx, rtx, rtx, rtx, rtx);
8426extern rtx gen_avx_cvtpd2ps256 (rtx, rtx);
8427extern rtx gen_avx_cvtpd2ps256_mask (rtx, rtx, rtx, rtx);
8428extern rtx gen_avx512f_cvtps2pd512 (rtx, rtx);
8429extern rtx gen_avx512f_cvtps2pd512_round (rtx, rtx, rtx);
8430extern rtx gen_avx512f_cvtps2pd512_mask (rtx, rtx, rtx, rtx);
8431extern rtx gen_avx512f_cvtps2pd512_mask_round (rtx, rtx, rtx, rtx, rtx);
8432extern rtx gen_avx_cvtps2pd256 (rtx, rtx);
8433static inline rtx gen_avx_cvtps2pd256_round (rtx, rtx, rtx);
/* Stub for the `avx_cvtps2pd256_round' pattern: not instantiated for
   this configuration; operands are ignored and 0 is returned.
   NOTE(review): 0 (null rtx) presumably signals "pattern unavailable"
   -- generator convention, confirm.  Auto-generated file: do not edit
   by hand.  */
static inline rtx
gen_avx_cvtps2pd256_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
  return 0;
}
8439extern rtx gen_avx_cvtps2pd256_mask (rtx, rtx, rtx, rtx);
8440static inline rtx gen_avx_cvtps2pd256_mask_round (rtx, rtx, rtx, rtx, rtx);
/* Stub for the `avx_cvtps2pd256_mask_round' pattern: not instantiated
   for this configuration; all five operands are ignored and 0 is
   returned.  NOTE(review): 0 (null rtx) presumably signals "pattern
   unavailable" -- generator convention, confirm.  Auto-generated file:
   do not edit by hand.  */
static inline rtx
gen_avx_cvtps2pd256_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
8446extern rtx gen_vec_unpacks_lo_v16sf (rtx, rtx);
8447extern rtx gen_avx512bw_cvtb2maskv64qi (rtx, rtx);
8448extern rtx gen_avx512vl_cvtb2maskv16qi (rtx, rtx);
8449extern rtx gen_avx512vl_cvtb2maskv32qi (rtx, rtx);
8450extern rtx gen_avx512bw_cvtw2maskv32hi (rtx, rtx);
8451extern rtx gen_avx512vl_cvtw2maskv16hi (rtx, rtx);
8452extern rtx gen_avx512vl_cvtw2maskv8hi (rtx, rtx);
8453extern rtx gen_avx512f_cvtd2maskv16si (rtx, rtx);
8454extern rtx gen_avx512vl_cvtd2maskv8si (rtx, rtx);
8455extern rtx gen_avx512vl_cvtd2maskv4si (rtx, rtx);
8456extern rtx gen_avx512f_cvtq2maskv8di (rtx, rtx);
8457extern rtx gen_avx512vl_cvtq2maskv4di (rtx, rtx);
8458extern rtx gen_avx512vl_cvtq2maskv2di (rtx, rtx);
8459extern rtx gen_sse2_cvtps2pd (rtx, rtx);
8460extern rtx gen_sse2_cvtps2pd_mask (rtx, rtx, rtx, rtx);
8461extern rtx gen_sse_movhlps (rtx, rtx, rtx);
8462extern rtx gen_sse_movlhps (rtx, rtx, rtx);
8463extern rtx gen_avx512f_unpckhps512_mask (rtx, rtx, rtx, rtx, rtx);
8464extern rtx gen_avx_unpckhps256 (rtx, rtx, rtx);
8465extern rtx gen_avx_unpckhps256_mask (rtx, rtx, rtx, rtx, rtx);
8466extern rtx gen_vec_interleave_highv4sf (rtx, rtx, rtx);
8467extern rtx gen_vec_interleave_highv4sf_mask (rtx, rtx, rtx, rtx, rtx);
8468extern rtx gen_avx512f_unpcklps512_mask (rtx, rtx, rtx, rtx, rtx);
8469extern rtx gen_avx_unpcklps256 (rtx, rtx, rtx);
8470extern rtx gen_avx_unpcklps256_mask (rtx, rtx, rtx, rtx, rtx);
8471extern rtx gen_unpcklps128_mask (rtx, rtx, rtx, rtx, rtx);
8472extern rtx gen_vec_interleave_lowv4sf (rtx, rtx, rtx);
8473extern rtx gen_avx_movshdup256 (rtx, rtx);
8474extern rtx gen_avx_movshdup256_mask (rtx, rtx, rtx, rtx);
8475extern rtx gen_sse3_movshdup (rtx, rtx);
8476extern rtx gen_sse3_movshdup_mask (rtx, rtx, rtx, rtx);
8477extern rtx gen_avx512f_movshdup512_mask (rtx, rtx, rtx, rtx);
8478extern rtx gen_avx_movsldup256 (rtx, rtx);
8479extern rtx gen_avx_movsldup256_mask (rtx, rtx, rtx, rtx);
8480extern rtx gen_sse3_movsldup (rtx, rtx);
8481extern rtx gen_sse3_movsldup_mask (rtx, rtx, rtx, rtx);
8482extern rtx gen_avx512f_movsldup512_mask (rtx, rtx, rtx, rtx);
8483extern rtx gen_avx_shufps256_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8484extern rtx gen_avx_shufps256_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8485extern rtx gen_sse_shufps_v4sf_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8486extern rtx gen_sse_shufps_v4si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8487extern rtx gen_sse_shufps_v4sf (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8488extern rtx gen_sse_storehps (rtx, rtx);
8489extern rtx gen_sse_loadhps (rtx, rtx, rtx);
8490extern rtx gen_sse_storelps (rtx, rtx);
8491extern rtx gen_sse_loadlps (rtx, rtx, rtx);
8492extern rtx gen_sse_movss (rtx, rtx, rtx);
8493extern rtx gen_avx2_vec_dupv8sf (rtx, rtx);
8494extern rtx gen_avx2_vec_dupv4sf (rtx, rtx);
8495extern rtx gen_avx2_vec_dupv8sf_1 (rtx, rtx);
8496extern rtx gen_avx512f_vec_dupv16sf_1 (rtx, rtx);
8497extern rtx gen_avx512f_vec_dupv8df_1 (rtx, rtx);
8498extern rtx gen_vec_setv4si_0 (rtx, rtx, rtx);
8499extern rtx gen_vec_setv4sf_0 (rtx, rtx, rtx);
8500extern rtx gen_sse4_1_insertps (rtx, rtx, rtx, rtx);
8501extern rtx gen_avx512dq_vextractf64x2_1_maskm (rtx, rtx, rtx, rtx, rtx, rtx);
8502extern rtx gen_avx512dq_vextracti64x2_1_maskm (rtx, rtx, rtx, rtx, rtx, rtx);
8503extern rtx gen_avx512f_vextractf32x4_1_maskm (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8504extern rtx gen_avx512f_vextracti32x4_1_maskm (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8505extern rtx gen_avx512dq_vextractf64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8506extern rtx gen_avx512dq_vextracti64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8507extern rtx gen_avx512f_vextractf32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8508extern rtx gen_avx512f_vextracti32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8509extern rtx gen_vec_extract_lo_v8df_maskm (rtx, rtx, rtx, rtx);
8510extern rtx gen_vec_extract_lo_v8di_maskm (rtx, rtx, rtx, rtx);
8511extern rtx gen_vec_extract_lo_v8df (rtx, rtx);
8512extern rtx gen_vec_extract_lo_v8df_mask (rtx, rtx, rtx, rtx);
8513extern rtx gen_vec_extract_lo_v8di (rtx, rtx);
8514extern rtx gen_vec_extract_lo_v8di_mask (rtx, rtx, rtx, rtx);
8515extern rtx gen_vec_extract_hi_v8df_maskm (rtx, rtx, rtx, rtx);
8516extern rtx gen_vec_extract_hi_v8di_maskm (rtx, rtx, rtx, rtx);
8517extern rtx gen_vec_extract_hi_v8df (rtx, rtx);
8518extern rtx gen_vec_extract_hi_v8df_mask (rtx, rtx, rtx, rtx);
8519extern rtx gen_vec_extract_hi_v8di (rtx, rtx);
8520extern rtx gen_vec_extract_hi_v8di_mask (rtx, rtx, rtx, rtx);
8521extern rtx gen_vec_extract_hi_v16sf_maskm (rtx, rtx, rtx, rtx);
8522extern rtx gen_vec_extract_hi_v16si_maskm (rtx, rtx, rtx, rtx);
8523extern rtx gen_vec_extract_hi_v16sf (rtx, rtx);
8524extern rtx gen_vec_extract_hi_v16sf_mask (rtx, rtx, rtx, rtx);
8525extern rtx gen_vec_extract_hi_v16si (rtx, rtx);
8526extern rtx gen_vec_extract_hi_v16si_mask (rtx, rtx, rtx, rtx);
8527extern rtx gen_vec_extract_lo_v16sf (rtx, rtx);
8528extern rtx gen_vec_extract_lo_v16sf_mask (rtx, rtx, rtx, rtx);
8529extern rtx gen_vec_extract_lo_v16si (rtx, rtx);
8530extern rtx gen_vec_extract_lo_v16si_mask (rtx, rtx, rtx, rtx);
8531extern rtx gen_vec_extract_lo_v4di (rtx, rtx);
8532extern rtx gen_vec_extract_lo_v4di_mask (rtx, rtx, rtx, rtx);
8533extern rtx gen_vec_extract_lo_v4df (rtx, rtx);
8534extern rtx gen_vec_extract_lo_v4df_mask (rtx, rtx, rtx, rtx);
8535extern rtx gen_vec_extract_hi_v4di (rtx, rtx);
8536extern rtx gen_vec_extract_hi_v4di_mask (rtx, rtx, rtx, rtx);
8537extern rtx gen_vec_extract_hi_v4df (rtx, rtx);
8538extern rtx gen_vec_extract_hi_v4df_mask (rtx, rtx, rtx, rtx);
8539extern rtx gen_vec_extract_lo_v8si (rtx, rtx);
8540extern rtx gen_vec_extract_lo_v8si_mask (rtx, rtx, rtx, rtx);
8541extern rtx gen_vec_extract_lo_v8sf (rtx, rtx);
8542extern rtx gen_vec_extract_lo_v8sf_mask (rtx, rtx, rtx, rtx);
8543extern rtx gen_vec_extract_lo_v8si_maskm (rtx, rtx, rtx, rtx);
8544extern rtx gen_vec_extract_lo_v8sf_maskm (rtx, rtx, rtx, rtx);
8545extern rtx gen_vec_extract_hi_v8si_maskm (rtx, rtx, rtx, rtx);
8546extern rtx gen_vec_extract_hi_v8sf_maskm (rtx, rtx, rtx, rtx);
8547extern rtx gen_vec_extract_hi_v8si_mask (rtx, rtx, rtx, rtx);
8548extern rtx gen_vec_extract_hi_v8sf_mask (rtx, rtx, rtx, rtx);
8549extern rtx gen_vec_extract_hi_v8si (rtx, rtx);
8550extern rtx gen_vec_extract_hi_v8sf (rtx, rtx);
8551extern rtx gen_vec_extract_lo_v32hi (rtx, rtx);
8552extern rtx gen_vec_extract_hi_v32hi (rtx, rtx);
8553extern rtx gen_vec_extract_lo_v16hi (rtx, rtx);
8554extern rtx gen_vec_extract_hi_v16hi (rtx, rtx);
8555extern rtx gen_vec_extract_lo_v64qi (rtx, rtx);
8556extern rtx gen_vec_extract_hi_v64qi (rtx, rtx);
8557extern rtx gen_vec_extract_lo_v32qi (rtx, rtx);
8558extern rtx gen_vec_extract_hi_v32qi (rtx, rtx);
8559extern rtx gen_avx512f_unpckhpd512_mask (rtx, rtx, rtx, rtx, rtx);
8560extern rtx gen_avx_unpckhpd256 (rtx, rtx, rtx);
8561extern rtx gen_avx_unpckhpd256_mask (rtx, rtx, rtx, rtx, rtx);
8562extern rtx gen_avx512vl_unpckhpd128_mask (rtx, rtx, rtx, rtx, rtx);
8563extern rtx gen_avx512vl_unpcklpd128_mask (rtx, rtx, rtx, rtx, rtx);
8564extern rtx gen_avx512f_vmscalefv4sf (rtx, rtx, rtx);
8565extern rtx gen_avx512f_vmscalefv4sf_round (rtx, rtx, rtx, rtx);
8566extern rtx gen_avx512f_vmscalefv2df (rtx, rtx, rtx);
8567extern rtx gen_avx512f_vmscalefv2df_round (rtx, rtx, rtx, rtx);
8568extern rtx gen_avx512f_scalefv16sf (rtx, rtx, rtx);
8569extern rtx gen_avx512f_scalefv16sf_round (rtx, rtx, rtx, rtx);
8570extern rtx gen_avx512f_scalefv16sf_mask (rtx, rtx, rtx, rtx, rtx);
8571extern rtx gen_avx512f_scalefv16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8572extern rtx gen_avx512vl_scalefv8sf (rtx, rtx, rtx);
8573extern rtx gen_avx512vl_scalefv8sf_round (rtx, rtx, rtx, rtx);
8574extern rtx gen_avx512vl_scalefv8sf_mask (rtx, rtx, rtx, rtx, rtx);
8575extern rtx gen_avx512vl_scalefv8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8576extern rtx gen_avx512vl_scalefv4sf (rtx, rtx, rtx);
8577extern rtx gen_avx512vl_scalefv4sf_round (rtx, rtx, rtx, rtx);
8578extern rtx gen_avx512vl_scalefv4sf_mask (rtx, rtx, rtx, rtx, rtx);
8579extern rtx gen_avx512vl_scalefv4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8580extern rtx gen_avx512f_scalefv8df (rtx, rtx, rtx);
8581extern rtx gen_avx512f_scalefv8df_round (rtx, rtx, rtx, rtx);
8582extern rtx gen_avx512f_scalefv8df_mask (rtx, rtx, rtx, rtx, rtx);
8583extern rtx gen_avx512f_scalefv8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8584extern rtx gen_avx512vl_scalefv4df (rtx, rtx, rtx);
8585extern rtx gen_avx512vl_scalefv4df_round (rtx, rtx, rtx, rtx);
8586extern rtx gen_avx512vl_scalefv4df_mask (rtx, rtx, rtx, rtx, rtx);
8587extern rtx gen_avx512vl_scalefv4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8588extern rtx gen_avx512vl_scalefv2df (rtx, rtx, rtx);
8589extern rtx gen_avx512vl_scalefv2df_round (rtx, rtx, rtx, rtx);
8590extern rtx gen_avx512vl_scalefv2df_mask (rtx, rtx, rtx, rtx, rtx);
8591extern rtx gen_avx512vl_scalefv2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8592extern rtx gen_avx512f_vternlogv16si (rtx, rtx, rtx, rtx, rtx);
8593extern rtx gen_avx512f_vternlogv16si_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8594extern rtx gen_avx512vl_vternlogv8si (rtx, rtx, rtx, rtx, rtx);
8595extern rtx gen_avx512vl_vternlogv8si_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8596extern rtx gen_avx512vl_vternlogv4si (rtx, rtx, rtx, rtx, rtx);
8597extern rtx gen_avx512vl_vternlogv4si_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8598extern rtx gen_avx512f_vternlogv8di (rtx, rtx, rtx, rtx, rtx);
8599extern rtx gen_avx512f_vternlogv8di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8600extern rtx gen_avx512vl_vternlogv4di (rtx, rtx, rtx, rtx, rtx);
8601extern rtx gen_avx512vl_vternlogv4di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8602extern rtx gen_avx512vl_vternlogv2di (rtx, rtx, rtx, rtx, rtx);
8603extern rtx gen_avx512vl_vternlogv2di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8604extern rtx gen_avx512f_vternlogv16si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8605extern rtx gen_avx512vl_vternlogv8si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8606extern rtx gen_avx512vl_vternlogv4si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8607extern rtx gen_avx512f_vternlogv8di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8608extern rtx gen_avx512vl_vternlogv4di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8609extern rtx gen_avx512vl_vternlogv2di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8610extern rtx gen_avx512f_getexpv16sf (rtx, rtx);
8611extern rtx gen_avx512f_getexpv16sf_round (rtx, rtx, rtx);
8612extern rtx gen_avx512f_getexpv16sf_mask (rtx, rtx, rtx, rtx);
8613extern rtx gen_avx512f_getexpv16sf_mask_round (rtx, rtx, rtx, rtx, rtx);
8614extern rtx gen_avx512vl_getexpv8sf (rtx, rtx);
8615extern rtx gen_avx512vl_getexpv8sf_round (rtx, rtx, rtx);
8616extern rtx gen_avx512vl_getexpv8sf_mask (rtx, rtx, rtx, rtx);
8617extern rtx gen_avx512vl_getexpv8sf_mask_round (rtx, rtx, rtx, rtx, rtx);
8618extern rtx gen_avx512vl_getexpv4sf (rtx, rtx);
8619extern rtx gen_avx512vl_getexpv4sf_round (rtx, rtx, rtx);
8620extern rtx gen_avx512vl_getexpv4sf_mask (rtx, rtx, rtx, rtx);
8621extern rtx gen_avx512vl_getexpv4sf_mask_round (rtx, rtx, rtx, rtx, rtx);
8622extern rtx gen_avx512f_getexpv8df (rtx, rtx);
8623extern rtx gen_avx512f_getexpv8df_round (rtx, rtx, rtx);
8624extern rtx gen_avx512f_getexpv8df_mask (rtx, rtx, rtx, rtx);
8625extern rtx gen_avx512f_getexpv8df_mask_round (rtx, rtx, rtx, rtx, rtx);
8626extern rtx gen_avx512vl_getexpv4df (rtx, rtx);
8627extern rtx gen_avx512vl_getexpv4df_round (rtx, rtx, rtx);
8628extern rtx gen_avx512vl_getexpv4df_mask (rtx, rtx, rtx, rtx);
8629extern rtx gen_avx512vl_getexpv4df_mask_round (rtx, rtx, rtx, rtx, rtx);
8630extern rtx gen_avx512vl_getexpv2df (rtx, rtx);
8631extern rtx gen_avx512vl_getexpv2df_round (rtx, rtx, rtx);
8632extern rtx gen_avx512vl_getexpv2df_mask (rtx, rtx, rtx, rtx);
8633extern rtx gen_avx512vl_getexpv2df_mask_round (rtx, rtx, rtx, rtx, rtx);
8634extern rtx gen_avx512f_sgetexpv4sf (rtx, rtx, rtx);
8635extern rtx gen_avx512f_sgetexpv4sf_round (rtx, rtx, rtx, rtx);
8636extern rtx gen_avx512f_sgetexpv2df (rtx, rtx, rtx);
8637extern rtx gen_avx512f_sgetexpv2df_round (rtx, rtx, rtx, rtx);
8638extern rtx gen_avx512f_alignv16si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8639extern rtx gen_avx512vl_alignv8si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8640extern rtx gen_avx512vl_alignv4si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8641extern rtx gen_avx512f_alignv8di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8642extern rtx gen_avx512vl_alignv4di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8643extern rtx gen_avx512vl_alignv2di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8644extern rtx gen_avx512f_fixupimmv16sf (rtx, rtx, rtx, rtx, rtx);
8645extern rtx gen_avx512f_fixupimmv16sf_round (rtx, rtx, rtx, rtx, rtx, rtx);
8646extern rtx gen_avx512f_fixupimmv16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8647extern rtx gen_avx512f_fixupimmv16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8648extern rtx gen_avx512vl_fixupimmv8sf (rtx, rtx, rtx, rtx, rtx);
8649extern rtx gen_avx512vl_fixupimmv8sf_round (rtx, rtx, rtx, rtx, rtx, rtx);
8650extern rtx gen_avx512vl_fixupimmv8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8651extern rtx gen_avx512vl_fixupimmv8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8652extern rtx gen_avx512vl_fixupimmv4sf (rtx, rtx, rtx, rtx, rtx);
8653extern rtx gen_avx512vl_fixupimmv4sf_round (rtx, rtx, rtx, rtx, rtx, rtx);
8654extern rtx gen_avx512vl_fixupimmv4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8655extern rtx gen_avx512vl_fixupimmv4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8656extern rtx gen_avx512f_fixupimmv8df (rtx, rtx, rtx, rtx, rtx);
8657extern rtx gen_avx512f_fixupimmv8df_round (rtx, rtx, rtx, rtx, rtx, rtx);
8658extern rtx gen_avx512f_fixupimmv8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8659extern rtx gen_avx512f_fixupimmv8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8660extern rtx gen_avx512vl_fixupimmv4df (rtx, rtx, rtx, rtx, rtx);
8661extern rtx gen_avx512vl_fixupimmv4df_round (rtx, rtx, rtx, rtx, rtx, rtx);
8662extern rtx gen_avx512vl_fixupimmv4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8663extern rtx gen_avx512vl_fixupimmv4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8664extern rtx gen_avx512vl_fixupimmv2df (rtx, rtx, rtx, rtx, rtx);
8665extern rtx gen_avx512vl_fixupimmv2df_round (rtx, rtx, rtx, rtx, rtx, rtx);
8666extern rtx gen_avx512vl_fixupimmv2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8667extern rtx gen_avx512vl_fixupimmv2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8668extern rtx gen_avx512f_fixupimmv16sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8669extern rtx gen_avx512f_fixupimmv16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8670extern rtx gen_avx512vl_fixupimmv8sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8671extern rtx gen_avx512vl_fixupimmv8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8672extern rtx gen_avx512vl_fixupimmv4sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8673extern rtx gen_avx512vl_fixupimmv4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8674extern rtx gen_avx512f_fixupimmv8df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8675extern rtx gen_avx512f_fixupimmv8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8676extern rtx gen_avx512vl_fixupimmv4df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8677extern rtx gen_avx512vl_fixupimmv4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8678extern rtx gen_avx512vl_fixupimmv2df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8679extern rtx gen_avx512vl_fixupimmv2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8680extern rtx gen_avx512f_sfixupimmv4sf (rtx, rtx, rtx, rtx, rtx);
8681extern rtx gen_avx512f_sfixupimmv4sf_round (rtx, rtx, rtx, rtx, rtx, rtx);
8682extern rtx gen_avx512f_sfixupimmv4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8683extern rtx gen_avx512f_sfixupimmv4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8684extern rtx gen_avx512f_sfixupimmv2df (rtx, rtx, rtx, rtx, rtx);
8685extern rtx gen_avx512f_sfixupimmv2df_round (rtx, rtx, rtx, rtx, rtx, rtx);
8686extern rtx gen_avx512f_sfixupimmv2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8687extern rtx gen_avx512f_sfixupimmv2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8688extern rtx gen_avx512f_sfixupimmv4sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8689extern rtx gen_avx512f_sfixupimmv4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8690extern rtx gen_avx512f_sfixupimmv2df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
8691extern rtx gen_avx512f_sfixupimmv2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8692extern rtx gen_avx512f_rndscalev16sf (rtx, rtx, rtx);
8693extern rtx gen_avx512f_rndscalev16sf_round (rtx, rtx, rtx, rtx);
8694extern rtx gen_avx512f_rndscalev16sf_mask (rtx, rtx, rtx, rtx, rtx);
8695extern rtx gen_avx512f_rndscalev16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8696extern rtx gen_avx512vl_rndscalev8sf (rtx, rtx, rtx);
8697extern rtx gen_avx512vl_rndscalev8sf_round (rtx, rtx, rtx, rtx);
8698extern rtx gen_avx512vl_rndscalev8sf_mask (rtx, rtx, rtx, rtx, rtx);
8699extern rtx gen_avx512vl_rndscalev8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8700extern rtx gen_avx512vl_rndscalev4sf (rtx, rtx, rtx);
8701extern rtx gen_avx512vl_rndscalev4sf_round (rtx, rtx, rtx, rtx);
8702extern rtx gen_avx512vl_rndscalev4sf_mask (rtx, rtx, rtx, rtx, rtx);
8703extern rtx gen_avx512vl_rndscalev4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8704extern rtx gen_avx512f_rndscalev8df (rtx, rtx, rtx);
8705extern rtx gen_avx512f_rndscalev8df_round (rtx, rtx, rtx, rtx);
8706extern rtx gen_avx512f_rndscalev8df_mask (rtx, rtx, rtx, rtx, rtx);
8707extern rtx gen_avx512f_rndscalev8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8708extern rtx gen_avx512vl_rndscalev4df (rtx, rtx, rtx);
8709extern rtx gen_avx512vl_rndscalev4df_round (rtx, rtx, rtx, rtx);
8710extern rtx gen_avx512vl_rndscalev4df_mask (rtx, rtx, rtx, rtx, rtx);
8711extern rtx gen_avx512vl_rndscalev4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8712extern rtx gen_avx512vl_rndscalev2df (rtx, rtx, rtx);
8713extern rtx gen_avx512vl_rndscalev2df_round (rtx, rtx, rtx, rtx);
8714extern rtx gen_avx512vl_rndscalev2df_mask (rtx, rtx, rtx, rtx, rtx);
8715extern rtx gen_avx512vl_rndscalev2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
8716extern rtx gen_avx512f_rndscalev4sf (rtx, rtx, rtx, rtx);
8717extern rtx gen_avx512f_rndscalev4sf_round (rtx, rtx, rtx, rtx, rtx);
8718extern rtx gen_avx512f_rndscalev2df (rtx, rtx, rtx, rtx);
8719extern rtx gen_avx512f_rndscalev2df_round (rtx, rtx, rtx, rtx, rtx);
8720extern rtx gen_avx512f_shufps512_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8721extern rtx gen_avx512f_shufps512_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8722extern rtx gen_avx512f_shufpd512_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8723extern rtx gen_avx512f_shufpd512_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8724extern rtx gen_avx_shufpd256_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8725extern rtx gen_avx_shufpd256_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8726extern rtx gen_sse2_shufpd_v2df_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
8727extern rtx gen_avx2_interleave_highv4di (rtx, rtx, rtx);
8728extern rtx gen_avx2_interleave_highv4di_mask (rtx, rtx, rtx, rtx, rtx);
8729extern rtx gen_avx512f_interleave_highv8di_mask (rtx, rtx, rtx, rtx, rtx);
8730extern rtx gen_vec_interleave_highv2di (rtx, rtx, rtx);
8731extern rtx gen_vec_interleave_highv2di_mask (rtx, rtx, rtx, rtx, rtx);
8732extern rtx gen_avx2_interleave_lowv4di (rtx, rtx, rtx);
8733extern rtx gen_avx2_interleave_lowv4di_mask (rtx, rtx, rtx, rtx, rtx);
8734extern rtx gen_avx512f_interleave_lowv8di_mask (rtx, rtx, rtx, rtx, rtx);
8735extern rtx gen_vec_interleave_lowv2di (rtx, rtx, rtx);
8736extern rtx gen_vec_interleave_lowv2di_mask (rtx, rtx, rtx, rtx, rtx);
8737extern rtx gen_sse2_shufpd_v2di (rtx, rtx, rtx, rtx, rtx);
8738extern rtx gen_sse2_shufpd_v2df (rtx, rtx, rtx, rtx, rtx);
8739extern rtx gen_sse2_storehpd (rtx, rtx);
8740extern rtx gen_sse2_storelpd (rtx, rtx);
8741extern rtx gen_sse2_loadhpd (rtx, rtx, rtx);
8742extern rtx gen_sse2_loadlpd (rtx, rtx, rtx);
8743extern rtx gen_sse2_movsd (rtx, rtx, rtx);
8744extern rtx gen_vec_dupv2df (rtx, rtx);
8745extern rtx gen_vec_dupv2df_mask (rtx, rtx, rtx, rtx);
8746extern rtx gen_vec_concatv2df (rtx, rtx, rtx);
8747extern rtx gen_avx512f_ss_truncatev16siv16qi2_mask (rtx, rtx, rtx, rtx);
8748extern rtx gen_avx512f_truncatev16siv16qi2_mask (rtx, rtx, rtx, rtx);
8749extern rtx gen_avx512f_us_truncatev16siv16qi2_mask (rtx, rtx, rtx, rtx);
8750extern rtx gen_avx512f_ss_truncatev16siv16hi2_mask (rtx, rtx, rtx, rtx);
8751extern rtx gen_avx512f_truncatev16siv16hi2_mask (rtx, rtx, rtx, rtx);
8752extern rtx gen_avx512f_us_truncatev16siv16hi2_mask (rtx, rtx, rtx, rtx);
8753extern rtx gen_avx512f_ss_truncatev8div8si2_mask (rtx, rtx, rtx, rtx);
8754extern rtx gen_avx512f_truncatev8div8si2_mask (rtx, rtx, rtx, rtx);
8755extern rtx gen_avx512f_us_truncatev8div8si2_mask (rtx, rtx, rtx, rtx);
8756extern rtx gen_avx512f_ss_truncatev8div8hi2_mask (rtx, rtx, rtx, rtx);
8757extern rtx gen_avx512f_truncatev8div8hi2_mask (rtx, rtx, rtx, rtx);
8758extern rtx gen_avx512f_us_truncatev8div8hi2_mask (rtx, rtx, rtx, rtx);
8759extern rtx gen_avx512bw_ss_truncatev32hiv32qi2 (rtx, rtx);
8760extern rtx gen_avx512bw_truncatev32hiv32qi2 (rtx, rtx);
8761extern rtx gen_avx512bw_us_truncatev32hiv32qi2 (rtx, rtx);
8762extern rtx gen_avx512bw_ss_truncatev32hiv32qi2_mask (rtx, rtx, rtx, rtx);
8763extern rtx gen_avx512bw_truncatev32hiv32qi2_mask (rtx, rtx, rtx, rtx);
8764extern rtx gen_avx512bw_us_truncatev32hiv32qi2_mask (rtx, rtx, rtx, rtx);
8765extern rtx gen_avx512vl_ss_truncatev4div4si2_mask (rtx, rtx, rtx, rtx);
8766extern rtx gen_avx512vl_truncatev4div4si2_mask (rtx, rtx, rtx, rtx);
8767extern rtx gen_avx512vl_us_truncatev4div4si2_mask (rtx, rtx, rtx, rtx);
8768extern rtx gen_avx512vl_ss_truncatev8siv8hi2_mask (rtx, rtx, rtx, rtx);
8769extern rtx gen_avx512vl_truncatev8siv8hi2_mask (rtx, rtx, rtx, rtx);
8770extern rtx gen_avx512vl_us_truncatev8siv8hi2_mask (rtx, rtx, rtx, rtx);
8771extern rtx gen_avx512vl_ss_truncatev16hiv16qi2_mask (rtx, rtx, rtx, rtx);
8772extern rtx gen_avx512vl_truncatev16hiv16qi2_mask (rtx, rtx, rtx, rtx);
8773extern rtx gen_avx512vl_us_truncatev16hiv16qi2_mask (rtx, rtx, rtx, rtx);
8774extern rtx gen_avx512vl_ss_truncatev2div2qi2_mask (rtx, rtx, rtx, rtx);
8775extern rtx gen_avx512vl_truncatev2div2qi2_mask (rtx, rtx, rtx, rtx);
8776extern rtx gen_avx512vl_us_truncatev2div2qi2_mask (rtx, rtx, rtx, rtx);
8777extern rtx gen_avx512vl_ss_truncatev2div2qi2_mask_store (rtx, rtx, rtx);
8778extern rtx gen_avx512vl_truncatev2div2qi2_mask_store (rtx, rtx, rtx);
8779extern rtx gen_avx512vl_us_truncatev2div2qi2_mask_store (rtx, rtx, rtx);
8780extern rtx gen_avx512vl_ss_truncatev4siv4qi2_mask (rtx, rtx, rtx, rtx);
8781extern rtx gen_avx512vl_truncatev4siv4qi2_mask (rtx, rtx, rtx, rtx);
8782extern rtx gen_avx512vl_us_truncatev4siv4qi2_mask (rtx, rtx, rtx, rtx);
8783extern rtx gen_avx512vl_ss_truncatev4div4qi2_mask (rtx, rtx, rtx, rtx);
8784extern rtx gen_avx512vl_truncatev4div4qi2_mask (rtx, rtx, rtx, rtx);
8785extern rtx gen_avx512vl_us_truncatev4div4qi2_mask (rtx, rtx, rtx, rtx);
8786extern rtx gen_avx512vl_ss_truncatev4siv4qi2_mask_store (rtx, rtx, rtx);
8787extern rtx gen_avx512vl_truncatev4siv4qi2_mask_store (rtx, rtx, rtx);
8788extern rtx gen_avx512vl_us_truncatev4siv4qi2_mask_store (rtx, rtx, rtx);
8789extern rtx gen_avx512vl_ss_truncatev4div4qi2_mask_store (rtx, rtx, rtx);
8790extern rtx gen_avx512vl_truncatev4div4qi2_mask_store (rtx, rtx, rtx);
8791extern rtx gen_avx512vl_us_truncatev4div4qi2_mask_store (rtx, rtx, rtx);
8792extern rtx gen_avx512vl_ss_truncatev8hiv8qi2_mask (rtx, rtx, rtx, rtx);
8793extern rtx gen_avx512vl_truncatev8hiv8qi2_mask (rtx, rtx, rtx, rtx);
8794extern rtx gen_avx512vl_us_truncatev8hiv8qi2_mask (rtx, rtx, rtx, rtx);
8795extern rtx gen_avx512vl_ss_truncatev8siv8qi2_mask (rtx, rtx, rtx, rtx);
8796extern rtx gen_avx512vl_truncatev8siv8qi2_mask (rtx, rtx, rtx, rtx);
8797extern rtx gen_avx512vl_us_truncatev8siv8qi2_mask (rtx, rtx, rtx, rtx);
8798extern rtx gen_avx512vl_ss_truncatev8hiv8qi2_mask_store (rtx, rtx, rtx);
8799extern rtx gen_avx512vl_truncatev8hiv8qi2_mask_store (rtx, rtx, rtx);
8800extern rtx gen_avx512vl_us_truncatev8hiv8qi2_mask_store (rtx, rtx, rtx);
8801extern rtx gen_avx512vl_ss_truncatev8siv8qi2_mask_store (rtx, rtx, rtx);
8802extern rtx gen_avx512vl_truncatev8siv8qi2_mask_store (rtx, rtx, rtx);
8803extern rtx gen_avx512vl_us_truncatev8siv8qi2_mask_store (rtx, rtx, rtx);
8804extern rtx gen_avx512vl_ss_truncatev4siv4hi2_mask (rtx, rtx, rtx, rtx);
8805extern rtx gen_avx512vl_truncatev4siv4hi2_mask (rtx, rtx, rtx, rtx);
8806extern rtx gen_avx512vl_us_truncatev4siv4hi2_mask (rtx, rtx, rtx, rtx);
8807extern rtx gen_avx512vl_ss_truncatev4div4hi2_mask (rtx, rtx, rtx, rtx);
8808extern rtx gen_avx512vl_truncatev4div4hi2_mask (rtx, rtx, rtx, rtx);
8809extern rtx gen_avx512vl_us_truncatev4div4hi2_mask (rtx, rtx, rtx, rtx);
8810extern rtx gen_avx512vl_ss_truncatev4siv4hi2_mask_store (rtx, rtx, rtx);
8811extern rtx gen_avx512vl_truncatev4siv4hi2_mask_store (rtx, rtx, rtx);
8812extern rtx gen_avx512vl_us_truncatev4siv4hi2_mask_store (rtx, rtx, rtx);
8813extern rtx gen_avx512vl_ss_truncatev4div4hi2_mask_store (rtx, rtx, rtx);
8814extern rtx gen_avx512vl_truncatev4div4hi2_mask_store (rtx, rtx, rtx);
8815extern rtx gen_avx512vl_us_truncatev4div4hi2_mask_store (rtx, rtx, rtx);
8816extern rtx gen_avx512vl_ss_truncatev2div2hi2_mask (rtx, rtx, rtx, rtx);
8817extern rtx gen_avx512vl_truncatev2div2hi2_mask (rtx, rtx, rtx, rtx);
8818extern rtx gen_avx512vl_us_truncatev2div2hi2_mask (rtx, rtx, rtx, rtx);
8819extern rtx gen_avx512vl_ss_truncatev2div2hi2_mask_store (rtx, rtx, rtx);
8820extern rtx gen_avx512vl_truncatev2div2hi2_mask_store (rtx, rtx, rtx);
8821extern rtx gen_avx512vl_us_truncatev2div2hi2_mask_store (rtx, rtx, rtx);
8822extern rtx gen_avx512vl_ss_truncatev2div2si2_mask (rtx, rtx, rtx, rtx);
8823extern rtx gen_avx512vl_truncatev2div2si2_mask (rtx, rtx, rtx, rtx);
8824extern rtx gen_avx512vl_us_truncatev2div2si2_mask (rtx, rtx, rtx, rtx);
8825extern rtx gen_avx512vl_ss_truncatev2div2si2_mask_store (rtx, rtx, rtx);
8826extern rtx gen_avx512vl_truncatev2div2si2_mask_store (rtx, rtx, rtx);
8827extern rtx gen_avx512vl_us_truncatev2div2si2_mask_store (rtx, rtx, rtx);
8828extern rtx gen_avx512f_ss_truncatev8div16qi2_mask (rtx, rtx, rtx, rtx);
8829extern rtx gen_avx512f_truncatev8div16qi2_mask (rtx, rtx, rtx, rtx);
8830extern rtx gen_avx512f_us_truncatev8div16qi2_mask (rtx, rtx, rtx, rtx);
8831extern rtx gen_avx512f_ss_truncatev8div16qi2_mask_store (rtx, rtx, rtx);
8832extern rtx gen_avx512f_truncatev8div16qi2_mask_store (rtx, rtx, rtx);
8833extern rtx gen_avx512f_us_truncatev8div16qi2_mask_store (rtx, rtx, rtx);
8834extern rtx gen_avx512bw_pmaddwd512v32hi (rtx, rtx, rtx);
8835extern rtx gen_avx512bw_pmaddwd512v32hi_mask (rtx, rtx, rtx, rtx, rtx);
8836extern rtx gen_avx512bw_pmaddwd512v16hi (rtx, rtx, rtx);
8837extern rtx gen_avx512bw_pmaddwd512v16hi_mask (rtx, rtx, rtx, rtx, rtx);
8838extern rtx gen_avx512bw_pmaddwd512v8hi (rtx, rtx, rtx);
8839extern rtx gen_avx512bw_pmaddwd512v8hi_mask (rtx, rtx, rtx, rtx, rtx);
8840extern rtx gen_avx512dq_mulv8di3 (rtx, rtx, rtx);
8841extern rtx gen_avx512dq_mulv8di3_mask (rtx, rtx, rtx, rtx, rtx);
8842extern rtx gen_avx512dq_mulv4di3 (rtx, rtx, rtx);
8843extern rtx gen_avx512dq_mulv4di3_mask (rtx, rtx, rtx, rtx, rtx);
8844extern rtx gen_avx512dq_mulv2di3 (rtx, rtx, rtx);
8845extern rtx gen_avx512dq_mulv2di3_mask (rtx, rtx, rtx, rtx, rtx);
8846extern rtx gen_ashrv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
8847extern rtx gen_ashrv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
8848extern rtx gen_ashrv8si3_mask (rtx, rtx, rtx, rtx, rtx);
8849extern rtx gen_ashrv4si3_mask (rtx, rtx, rtx, rtx, rtx);
8850extern rtx gen_ashrv16hi3 (rtx, rtx, rtx);
8851extern rtx gen_ashrv8hi3 (rtx, rtx, rtx);
8852extern rtx gen_ashrv8si3 (rtx, rtx, rtx);
8853extern rtx gen_ashrv4si3 (rtx, rtx, rtx);
8854extern rtx gen_ashrv2di3_mask (rtx, rtx, rtx, rtx, rtx);
8855extern rtx gen_ashrv32hi3 (rtx, rtx, rtx);
8856extern rtx gen_ashrv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
8857extern rtx gen_ashrv4di3 (rtx, rtx, rtx);
8858extern rtx gen_ashrv4di3_mask (rtx, rtx, rtx, rtx, rtx);
8859extern rtx gen_ashrv16si3 (rtx, rtx, rtx);
8860extern rtx gen_ashrv16si3_mask (rtx, rtx, rtx, rtx, rtx);
8861extern rtx gen_ashrv8di3 (rtx, rtx, rtx);
8862extern rtx gen_ashrv8di3_mask (rtx, rtx, rtx, rtx, rtx);
8863extern rtx gen_ashlv32hi3 (rtx, rtx, rtx);
8864extern rtx gen_ashlv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
8865extern rtx gen_lshrv32hi3 (rtx, rtx, rtx);
8866extern rtx gen_lshrv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
8867extern rtx gen_ashlv16hi3 (rtx, rtx, rtx);
8868extern rtx gen_ashlv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
8869extern rtx gen_lshrv16hi3 (rtx, rtx, rtx);
8870extern rtx gen_lshrv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
8871extern rtx gen_ashlv8hi3 (rtx, rtx, rtx);
8872extern rtx gen_ashlv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
8873extern rtx gen_lshrv8hi3 (rtx, rtx, rtx);
8874extern rtx gen_lshrv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
8875extern rtx gen_ashlv8si3 (rtx, rtx, rtx);
8876extern rtx gen_ashlv8si3_mask (rtx, rtx, rtx, rtx, rtx);
8877extern rtx gen_lshrv8si3 (rtx, rtx, rtx);
8878extern rtx gen_lshrv8si3_mask (rtx, rtx, rtx, rtx, rtx);
8879extern rtx gen_ashlv4si3 (rtx, rtx, rtx);
8880extern rtx gen_ashlv4si3_mask (rtx, rtx, rtx, rtx, rtx);
8881extern rtx gen_lshrv4si3 (rtx, rtx, rtx);
8882extern rtx gen_lshrv4si3_mask (rtx, rtx, rtx, rtx, rtx);
8883extern rtx gen_ashlv4di3 (rtx, rtx, rtx);
8884extern rtx gen_ashlv4di3_mask (rtx, rtx, rtx, rtx, rtx);
8885extern rtx gen_lshrv4di3 (rtx, rtx, rtx);
8886extern rtx gen_lshrv4di3_mask (rtx, rtx, rtx, rtx, rtx);
8887extern rtx gen_ashlv2di3 (rtx, rtx, rtx);
8888extern rtx gen_ashlv2di3_mask (rtx, rtx, rtx, rtx, rtx);
8889extern rtx gen_lshrv2di3 (rtx, rtx, rtx);
8890extern rtx gen_lshrv2di3_mask (rtx, rtx, rtx, rtx, rtx);
8891extern rtx gen_ashlv16si3 (rtx, rtx, rtx);
8892extern rtx gen_ashlv16si3_mask (rtx, rtx, rtx, rtx, rtx);
8893extern rtx gen_lshrv16si3 (rtx, rtx, rtx);
8894extern rtx gen_lshrv16si3_mask (rtx, rtx, rtx, rtx, rtx);
8895extern rtx gen_ashlv8di3 (rtx, rtx, rtx);
8896extern rtx gen_ashlv8di3_mask (rtx, rtx, rtx, rtx, rtx);
8897extern rtx gen_lshrv8di3 (rtx, rtx, rtx);
8898extern rtx gen_lshrv8di3_mask (rtx, rtx, rtx, rtx, rtx);
8899extern rtx gen_avx512bw_ashlv4ti3 (rtx, rtx, rtx);
8900extern rtx gen_avx2_ashlv2ti3 (rtx, rtx, rtx);
8901extern rtx gen_sse2_ashlv1ti3 (rtx, rtx, rtx);
8902extern rtx gen_avx512bw_lshrv4ti3 (rtx, rtx, rtx);
8903extern rtx gen_avx2_lshrv2ti3 (rtx, rtx, rtx);
8904extern rtx gen_sse2_lshrv1ti3 (rtx, rtx, rtx);
8905extern rtx gen_avx512f_rolvv16si (rtx, rtx, rtx);
8906extern rtx gen_avx512f_rolvv16si_mask (rtx, rtx, rtx, rtx, rtx);
8907extern rtx gen_avx512f_rorvv16si (rtx, rtx, rtx);
8908extern rtx gen_avx512f_rorvv16si_mask (rtx, rtx, rtx, rtx, rtx);
8909extern rtx gen_avx512vl_rolvv8si (rtx, rtx, rtx);
8910extern rtx gen_avx512vl_rolvv8si_mask (rtx, rtx, rtx, rtx, rtx);
8911extern rtx gen_avx512vl_rorvv8si (rtx, rtx, rtx);
8912extern rtx gen_avx512vl_rorvv8si_mask (rtx, rtx, rtx, rtx, rtx);
8913extern rtx gen_avx512vl_rolvv4si (rtx, rtx, rtx);
8914extern rtx gen_avx512vl_rolvv4si_mask (rtx, rtx, rtx, rtx, rtx);
8915extern rtx gen_avx512vl_rorvv4si (rtx, rtx, rtx);
8916extern rtx gen_avx512vl_rorvv4si_mask (rtx, rtx, rtx, rtx, rtx);
8917extern rtx gen_avx512f_rolvv8di (rtx, rtx, rtx);
8918extern rtx gen_avx512f_rolvv8di_mask (rtx, rtx, rtx, rtx, rtx);
8919extern rtx gen_avx512f_rorvv8di (rtx, rtx, rtx);
8920extern rtx gen_avx512f_rorvv8di_mask (rtx, rtx, rtx, rtx, rtx);
8921extern rtx gen_avx512vl_rolvv4di (rtx, rtx, rtx);
8922extern rtx gen_avx512vl_rolvv4di_mask (rtx, rtx, rtx, rtx, rtx);
8923extern rtx gen_avx512vl_rorvv4di (rtx, rtx, rtx);
8924extern rtx gen_avx512vl_rorvv4di_mask (rtx, rtx, rtx, rtx, rtx);
8925extern rtx gen_avx512vl_rolvv2di (rtx, rtx, rtx);
8926extern rtx gen_avx512vl_rolvv2di_mask (rtx, rtx, rtx, rtx, rtx);
8927extern rtx gen_avx512vl_rorvv2di (rtx, rtx, rtx);
8928extern rtx gen_avx512vl_rorvv2di_mask (rtx, rtx, rtx, rtx, rtx);
8929extern rtx gen_avx512f_rolv16si (rtx, rtx, rtx);
8930extern rtx gen_avx512f_rolv16si_mask (rtx, rtx, rtx, rtx, rtx);
8931extern rtx gen_avx512f_rorv16si (rtx, rtx, rtx);
8932extern rtx gen_avx512f_rorv16si_mask (rtx, rtx, rtx, rtx, rtx);
8933extern rtx gen_avx512vl_rolv8si (rtx, rtx, rtx);
8934extern rtx gen_avx512vl_rolv8si_mask (rtx, rtx, rtx, rtx, rtx);
8935extern rtx gen_avx512vl_rorv8si (rtx, rtx, rtx);
8936extern rtx gen_avx512vl_rorv8si_mask (rtx, rtx, rtx, rtx, rtx);
8937extern rtx gen_avx512vl_rolv4si (rtx, rtx, rtx);
8938extern rtx gen_avx512vl_rolv4si_mask (rtx, rtx, rtx, rtx, rtx);
8939extern rtx gen_avx512vl_rorv4si (rtx, rtx, rtx);
8940extern rtx gen_avx512vl_rorv4si_mask (rtx, rtx, rtx, rtx, rtx);
8941extern rtx gen_avx512f_rolv8di (rtx, rtx, rtx);
8942extern rtx gen_avx512f_rolv8di_mask (rtx, rtx, rtx, rtx, rtx);
8943extern rtx gen_avx512f_rorv8di (rtx, rtx, rtx);
8944extern rtx gen_avx512f_rorv8di_mask (rtx, rtx, rtx, rtx, rtx);
8945extern rtx gen_avx512vl_rolv4di (rtx, rtx, rtx);
8946extern rtx gen_avx512vl_rolv4di_mask (rtx, rtx, rtx, rtx, rtx);
8947extern rtx gen_avx512vl_rorv4di (rtx, rtx, rtx);
8948extern rtx gen_avx512vl_rorv4di_mask (rtx, rtx, rtx, rtx, rtx);
8949extern rtx gen_avx512vl_rolv2di (rtx, rtx, rtx);
8950extern rtx gen_avx512vl_rolv2di_mask (rtx, rtx, rtx, rtx, rtx);
8951extern rtx gen_avx512vl_rorv2di (rtx, rtx, rtx);
8952extern rtx gen_avx512vl_rorv2di_mask (rtx, rtx, rtx, rtx, rtx);
8953extern rtx gen_smaxv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
8954extern rtx gen_sminv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
8955extern rtx gen_umaxv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
8956extern rtx gen_uminv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
8957extern rtx gen_smaxv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
8958extern rtx gen_sminv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
8959extern rtx gen_umaxv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
8960extern rtx gen_uminv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
8961extern rtx gen_smaxv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
8962extern rtx gen_sminv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
8963extern rtx gen_umaxv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
8964extern rtx gen_uminv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
8965extern rtx gen_smaxv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
8966extern rtx gen_sminv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
8967extern rtx gen_umaxv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
8968extern rtx gen_uminv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
8969extern rtx gen_smaxv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
8970extern rtx gen_sminv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
8971extern rtx gen_umaxv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
8972extern rtx gen_uminv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
8973extern rtx gen_smaxv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
8974extern rtx gen_sminv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
8975extern rtx gen_umaxv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
8976extern rtx gen_uminv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
8977extern rtx gen_avx512bw_eqv64qi3_1 (rtx, rtx, rtx);
8978extern rtx gen_avx512bw_eqv64qi3_mask_1 (rtx, rtx, rtx, rtx);
8979extern rtx gen_avx512vl_eqv16qi3_1 (rtx, rtx, rtx);
8980extern rtx gen_avx512vl_eqv16qi3_mask_1 (rtx, rtx, rtx, rtx);
8981extern rtx gen_avx512vl_eqv32qi3_1 (rtx, rtx, rtx);
8982extern rtx gen_avx512vl_eqv32qi3_mask_1 (rtx, rtx, rtx, rtx);
8983extern rtx gen_avx512bw_eqv32hi3_1 (rtx, rtx, rtx);
8984extern rtx gen_avx512bw_eqv32hi3_mask_1 (rtx, rtx, rtx, rtx);
8985extern rtx gen_avx512vl_eqv16hi3_1 (rtx, rtx, rtx);
8986extern rtx gen_avx512vl_eqv16hi3_mask_1 (rtx, rtx, rtx, rtx);
8987extern rtx gen_avx512vl_eqv8hi3_1 (rtx, rtx, rtx);
8988extern rtx gen_avx512vl_eqv8hi3_mask_1 (rtx, rtx, rtx, rtx);
8989extern rtx gen_avx512f_eqv16si3_1 (rtx, rtx, rtx);
8990extern rtx gen_avx512f_eqv16si3_mask_1 (rtx, rtx, rtx, rtx);
8991extern rtx gen_avx512vl_eqv8si3_1 (rtx, rtx, rtx);
8992extern rtx gen_avx512vl_eqv8si3_mask_1 (rtx, rtx, rtx, rtx);
8993extern rtx gen_avx512vl_eqv4si3_1 (rtx, rtx, rtx);
8994extern rtx gen_avx512vl_eqv4si3_mask_1 (rtx, rtx, rtx, rtx);
8995extern rtx gen_avx512f_eqv8di3_1 (rtx, rtx, rtx);
8996extern rtx gen_avx512f_eqv8di3_mask_1 (rtx, rtx, rtx, rtx);
8997extern rtx gen_avx512vl_eqv4di3_1 (rtx, rtx, rtx);
8998extern rtx gen_avx512vl_eqv4di3_mask_1 (rtx, rtx, rtx, rtx);
8999extern rtx gen_avx512vl_eqv2di3_1 (rtx, rtx, rtx);
9000extern rtx gen_avx512vl_eqv2di3_mask_1 (rtx, rtx, rtx, rtx);
9001extern rtx gen_sse4_2_gtv2di3 (rtx, rtx, rtx);
9002extern rtx gen_avx2_gtv32qi3 (rtx, rtx, rtx);
9003extern rtx gen_avx2_gtv16hi3 (rtx, rtx, rtx);
9004extern rtx gen_avx2_gtv8si3 (rtx, rtx, rtx);
9005extern rtx gen_avx2_gtv4di3 (rtx, rtx, rtx);
9006extern rtx gen_avx512f_gtv16si3 (rtx, rtx, rtx);
9007extern rtx gen_avx512f_gtv16si3_mask (rtx, rtx, rtx, rtx);
9008extern rtx gen_avx512vl_gtv8si3 (rtx, rtx, rtx);
9009extern rtx gen_avx512vl_gtv8si3_mask (rtx, rtx, rtx, rtx);
9010extern rtx gen_avx512vl_gtv4si3 (rtx, rtx, rtx);
9011extern rtx gen_avx512vl_gtv4si3_mask (rtx, rtx, rtx, rtx);
9012extern rtx gen_avx512f_gtv8di3 (rtx, rtx, rtx);
9013extern rtx gen_avx512f_gtv8di3_mask (rtx, rtx, rtx, rtx);
9014extern rtx gen_avx512vl_gtv4di3 (rtx, rtx, rtx);
9015extern rtx gen_avx512vl_gtv4di3_mask (rtx, rtx, rtx, rtx);
9016extern rtx gen_avx512vl_gtv2di3 (rtx, rtx, rtx);
9017extern rtx gen_avx512vl_gtv2di3_mask (rtx, rtx, rtx, rtx);
9018extern rtx gen_avx512bw_gtv64qi3 (rtx, rtx, rtx);
9019extern rtx gen_avx512bw_gtv64qi3_mask (rtx, rtx, rtx, rtx);
9020extern rtx gen_avx512vl_gtv16qi3 (rtx, rtx, rtx);
9021extern rtx gen_avx512vl_gtv16qi3_mask (rtx, rtx, rtx, rtx);
9022extern rtx gen_avx512vl_gtv32qi3 (rtx, rtx, rtx);
9023extern rtx gen_avx512vl_gtv32qi3_mask (rtx, rtx, rtx, rtx);
9024extern rtx gen_avx512bw_gtv32hi3 (rtx, rtx, rtx);
9025extern rtx gen_avx512bw_gtv32hi3_mask (rtx, rtx, rtx, rtx);
9026extern rtx gen_avx512vl_gtv16hi3 (rtx, rtx, rtx);
9027extern rtx gen_avx512vl_gtv16hi3_mask (rtx, rtx, rtx, rtx);
9028extern rtx gen_avx512vl_gtv8hi3 (rtx, rtx, rtx);
9029extern rtx gen_avx512vl_gtv8hi3_mask (rtx, rtx, rtx, rtx);
9030extern rtx gen_sse2_gtv16qi3 (rtx, rtx, rtx);
9031extern rtx gen_sse2_gtv8hi3 (rtx, rtx, rtx);
9032extern rtx gen_sse2_gtv4si3 (rtx, rtx, rtx);
9033extern rtx gen_andv16si3_mask (rtx, rtx, rtx, rtx, rtx);
9034extern rtx gen_iorv16si3_mask (rtx, rtx, rtx, rtx, rtx);
9035extern rtx gen_xorv16si3_mask (rtx, rtx, rtx, rtx, rtx);
9036extern rtx gen_andv8si3_mask (rtx, rtx, rtx, rtx, rtx);
9037extern rtx gen_iorv8si3_mask (rtx, rtx, rtx, rtx, rtx);
9038extern rtx gen_xorv8si3_mask (rtx, rtx, rtx, rtx, rtx);
9039extern rtx gen_andv4si3_mask (rtx, rtx, rtx, rtx, rtx);
9040extern rtx gen_iorv4si3_mask (rtx, rtx, rtx, rtx, rtx);
9041extern rtx gen_xorv4si3_mask (rtx, rtx, rtx, rtx, rtx);
9042extern rtx gen_andv8di3_mask (rtx, rtx, rtx, rtx, rtx);
9043extern rtx gen_iorv8di3_mask (rtx, rtx, rtx, rtx, rtx);
9044extern rtx gen_xorv8di3_mask (rtx, rtx, rtx, rtx, rtx);
9045extern rtx gen_andv4di3_mask (rtx, rtx, rtx, rtx, rtx);
9046extern rtx gen_iorv4di3_mask (rtx, rtx, rtx, rtx, rtx);
9047extern rtx gen_xorv4di3_mask (rtx, rtx, rtx, rtx, rtx);
9048extern rtx gen_andv2di3_mask (rtx, rtx, rtx, rtx, rtx);
9049extern rtx gen_iorv2di3_mask (rtx, rtx, rtx, rtx, rtx);
9050extern rtx gen_xorv2di3_mask (rtx, rtx, rtx, rtx, rtx);
9051extern rtx gen_avx512bw_testmv64qi3 (rtx, rtx, rtx);
9052extern rtx gen_avx512bw_testmv64qi3_mask (rtx, rtx, rtx, rtx);
9053extern rtx gen_avx512vl_testmv16qi3 (rtx, rtx, rtx);
9054extern rtx gen_avx512vl_testmv16qi3_mask (rtx, rtx, rtx, rtx);
9055extern rtx gen_avx512vl_testmv32qi3 (rtx, rtx, rtx);
9056extern rtx gen_avx512vl_testmv32qi3_mask (rtx, rtx, rtx, rtx);
9057extern rtx gen_avx512bw_testmv32hi3 (rtx, rtx, rtx);
9058extern rtx gen_avx512bw_testmv32hi3_mask (rtx, rtx, rtx, rtx);
9059extern rtx gen_avx512vl_testmv16hi3 (rtx, rtx, rtx);
9060extern rtx gen_avx512vl_testmv16hi3_mask (rtx, rtx, rtx, rtx);
9061extern rtx gen_avx512vl_testmv8hi3 (rtx, rtx, rtx);
9062extern rtx gen_avx512vl_testmv8hi3_mask (rtx, rtx, rtx, rtx);
9063extern rtx gen_avx512f_testmv16si3 (rtx, rtx, rtx);
9064extern rtx gen_avx512f_testmv16si3_mask (rtx, rtx, rtx, rtx);
9065extern rtx gen_avx512vl_testmv8si3 (rtx, rtx, rtx);
9066extern rtx gen_avx512vl_testmv8si3_mask (rtx, rtx, rtx, rtx);
9067extern rtx gen_avx512vl_testmv4si3 (rtx, rtx, rtx);
9068extern rtx gen_avx512vl_testmv4si3_mask (rtx, rtx, rtx, rtx);
9069extern rtx gen_avx512f_testmv8di3 (rtx, rtx, rtx);
9070extern rtx gen_avx512f_testmv8di3_mask (rtx, rtx, rtx, rtx);
9071extern rtx gen_avx512vl_testmv4di3 (rtx, rtx, rtx);
9072extern rtx gen_avx512vl_testmv4di3_mask (rtx, rtx, rtx, rtx);
9073extern rtx gen_avx512vl_testmv2di3 (rtx, rtx, rtx);
9074extern rtx gen_avx512vl_testmv2di3_mask (rtx, rtx, rtx, rtx);
9075extern rtx gen_avx512bw_testnmv64qi3 (rtx, rtx, rtx);
9076extern rtx gen_avx512bw_testnmv64qi3_mask (rtx, rtx, rtx, rtx);
9077extern rtx gen_avx512vl_testnmv16qi3 (rtx, rtx, rtx);
9078extern rtx gen_avx512vl_testnmv16qi3_mask (rtx, rtx, rtx, rtx);
9079extern rtx gen_avx512vl_testnmv32qi3 (rtx, rtx, rtx);
9080extern rtx gen_avx512vl_testnmv32qi3_mask (rtx, rtx, rtx, rtx);
9081extern rtx gen_avx512bw_testnmv32hi3 (rtx, rtx, rtx);
9082extern rtx gen_avx512bw_testnmv32hi3_mask (rtx, rtx, rtx, rtx);
9083extern rtx gen_avx512vl_testnmv16hi3 (rtx, rtx, rtx);
9084extern rtx gen_avx512vl_testnmv16hi3_mask (rtx, rtx, rtx, rtx);
9085extern rtx gen_avx512vl_testnmv8hi3 (rtx, rtx, rtx);
9086extern rtx gen_avx512vl_testnmv8hi3_mask (rtx, rtx, rtx, rtx);
9087extern rtx gen_avx512f_testnmv16si3 (rtx, rtx, rtx);
9088extern rtx gen_avx512f_testnmv16si3_mask (rtx, rtx, rtx, rtx);
9089extern rtx gen_avx512vl_testnmv8si3 (rtx, rtx, rtx);
9090extern rtx gen_avx512vl_testnmv8si3_mask (rtx, rtx, rtx, rtx);
9091extern rtx gen_avx512vl_testnmv4si3 (rtx, rtx, rtx);
9092extern rtx gen_avx512vl_testnmv4si3_mask (rtx, rtx, rtx, rtx);
9093extern rtx gen_avx512f_testnmv8di3 (rtx, rtx, rtx);
9094extern rtx gen_avx512f_testnmv8di3_mask (rtx, rtx, rtx, rtx);
9095extern rtx gen_avx512vl_testnmv4di3 (rtx, rtx, rtx);
9096extern rtx gen_avx512vl_testnmv4di3_mask (rtx, rtx, rtx, rtx);
9097extern rtx gen_avx512vl_testnmv2di3 (rtx, rtx, rtx);
9098extern rtx gen_avx512vl_testnmv2di3_mask (rtx, rtx, rtx, rtx);
9099extern rtx gen_avx512bw_packsswb (rtx, rtx, rtx);
9100extern rtx gen_avx512bw_packsswb_mask (rtx, rtx, rtx, rtx, rtx);
9101extern rtx gen_avx2_packsswb (rtx, rtx, rtx);
9102extern rtx gen_avx2_packsswb_mask (rtx, rtx, rtx, rtx, rtx);
9103extern rtx gen_sse2_packsswb (rtx, rtx, rtx);
9104extern rtx gen_sse2_packsswb_mask (rtx, rtx, rtx, rtx, rtx);
9105extern rtx gen_avx512bw_packssdw (rtx, rtx, rtx);
9106extern rtx gen_avx512bw_packssdw_mask (rtx, rtx, rtx, rtx, rtx);
9107extern rtx gen_avx2_packssdw (rtx, rtx, rtx);
9108extern rtx gen_avx2_packssdw_mask (rtx, rtx, rtx, rtx, rtx);
9109extern rtx gen_sse2_packssdw (rtx, rtx, rtx);
9110extern rtx gen_sse2_packssdw_mask (rtx, rtx, rtx, rtx, rtx);
9111extern rtx gen_avx512bw_packuswb (rtx, rtx, rtx);
9112extern rtx gen_avx512bw_packuswb_mask (rtx, rtx, rtx, rtx, rtx);
9113extern rtx gen_avx2_packuswb (rtx, rtx, rtx);
9114extern rtx gen_avx2_packuswb_mask (rtx, rtx, rtx, rtx, rtx);
9115extern rtx gen_sse2_packuswb (rtx, rtx, rtx);
9116extern rtx gen_sse2_packuswb_mask (rtx, rtx, rtx, rtx, rtx);
9117extern rtx gen_avx512bw_interleave_highv64qi (rtx, rtx, rtx);
9118extern rtx gen_avx512bw_interleave_highv64qi_mask (rtx, rtx, rtx, rtx, rtx);
9119extern rtx gen_avx2_interleave_highv32qi (rtx, rtx, rtx);
9120extern rtx gen_avx2_interleave_highv32qi_mask (rtx, rtx, rtx, rtx, rtx);
9121extern rtx gen_vec_interleave_highv16qi (rtx, rtx, rtx);
9122extern rtx gen_vec_interleave_highv16qi_mask (rtx, rtx, rtx, rtx, rtx);
9123extern rtx gen_avx512bw_interleave_lowv64qi (rtx, rtx, rtx);
9124extern rtx gen_avx512bw_interleave_lowv64qi_mask (rtx, rtx, rtx, rtx, rtx);
9125extern rtx gen_avx2_interleave_lowv32qi (rtx, rtx, rtx);
9126extern rtx gen_avx2_interleave_lowv32qi_mask (rtx, rtx, rtx, rtx, rtx);
9127extern rtx gen_vec_interleave_lowv16qi (rtx, rtx, rtx);
9128extern rtx gen_vec_interleave_lowv16qi_mask (rtx, rtx, rtx, rtx, rtx);
9129extern rtx gen_avx512bw_interleave_highv32hi (rtx, rtx, rtx);
9130extern rtx gen_avx512bw_interleave_highv32hi_mask (rtx, rtx, rtx, rtx, rtx);
9131extern rtx gen_avx2_interleave_highv16hi (rtx, rtx, rtx);
9132extern rtx gen_avx2_interleave_highv16hi_mask (rtx, rtx, rtx, rtx, rtx);
9133extern rtx gen_vec_interleave_highv8hi (rtx, rtx, rtx);
9134extern rtx gen_vec_interleave_highv8hi_mask (rtx, rtx, rtx, rtx, rtx);
9135extern rtx gen_avx512bw_interleave_lowv32hi_mask (rtx, rtx, rtx, rtx, rtx);
9136extern rtx gen_avx2_interleave_lowv16hi (rtx, rtx, rtx);
9137extern rtx gen_avx2_interleave_lowv16hi_mask (rtx, rtx, rtx, rtx, rtx);
9138extern rtx gen_vec_interleave_lowv8hi (rtx, rtx, rtx);
9139extern rtx gen_vec_interleave_lowv8hi_mask (rtx, rtx, rtx, rtx, rtx);
9140extern rtx gen_avx2_interleave_highv8si (rtx, rtx, rtx);
9141extern rtx gen_avx2_interleave_highv8si_mask (rtx, rtx, rtx, rtx, rtx);
9142extern rtx gen_avx512f_interleave_highv16si_mask (rtx, rtx, rtx, rtx, rtx);
9143extern rtx gen_vec_interleave_highv4si (rtx, rtx, rtx);
9144extern rtx gen_vec_interleave_highv4si_mask (rtx, rtx, rtx, rtx, rtx);
9145extern rtx gen_avx2_interleave_lowv8si (rtx, rtx, rtx);
9146extern rtx gen_avx2_interleave_lowv8si_mask (rtx, rtx, rtx, rtx, rtx);
9147extern rtx gen_avx512f_interleave_lowv16si_mask (rtx, rtx, rtx, rtx, rtx);
9148extern rtx gen_vec_interleave_lowv4si (rtx, rtx, rtx);
9149extern rtx gen_vec_interleave_lowv4si_mask (rtx, rtx, rtx, rtx, rtx);
9150extern rtx gen_sse4_1_pinsrb (rtx, rtx, rtx, rtx);
9151extern rtx gen_sse2_pinsrw (rtx, rtx, rtx, rtx);
9152extern rtx gen_sse4_1_pinsrd (rtx, rtx, rtx, rtx);
9153extern rtx gen_sse4_1_pinsrq (rtx, rtx, rtx, rtx);
9154extern rtx gen_avx512dq_vinsertf64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
9155extern rtx gen_avx512dq_vinserti64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
9156extern rtx gen_avx512f_vinsertf32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
9157extern rtx gen_avx512f_vinserti32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
9158extern rtx gen_vec_set_lo_v16sf (rtx, rtx, rtx);
9159extern rtx gen_vec_set_lo_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
9160extern rtx gen_vec_set_lo_v16si (rtx, rtx, rtx);
9161extern rtx gen_vec_set_lo_v16si_mask (rtx, rtx, rtx, rtx, rtx);
9162extern rtx gen_vec_set_hi_v16sf (rtx, rtx, rtx);
9163extern rtx gen_vec_set_hi_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
9164extern rtx gen_vec_set_hi_v16si (rtx, rtx, rtx);
9165extern rtx gen_vec_set_hi_v16si_mask (rtx, rtx, rtx, rtx, rtx);
9166extern rtx gen_vec_set_lo_v8df (rtx, rtx, rtx);
9167extern rtx gen_vec_set_lo_v8df_mask (rtx, rtx, rtx, rtx, rtx);
9168extern rtx gen_vec_set_lo_v8di (rtx, rtx, rtx);
9169extern rtx gen_vec_set_lo_v8di_mask (rtx, rtx, rtx, rtx, rtx);
9170extern rtx gen_vec_set_hi_v8df (rtx, rtx, rtx);
9171extern rtx gen_vec_set_hi_v8df_mask (rtx, rtx, rtx, rtx, rtx);
9172extern rtx gen_vec_set_hi_v8di (rtx, rtx, rtx);
9173extern rtx gen_vec_set_hi_v8di_mask (rtx, rtx, rtx, rtx, rtx);
9174extern rtx gen_avx512dq_shuf_i64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9175extern rtx gen_avx512dq_shuf_f64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9176extern rtx gen_avx512f_shuf_f64x2_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9177extern rtx gen_avx512f_shuf_f64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9178extern rtx gen_avx512f_shuf_i64x2_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9179extern rtx gen_avx512f_shuf_i64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9180extern rtx gen_avx512vl_shuf_i32x4_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9181extern rtx gen_avx512vl_shuf_i32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9182extern rtx gen_avx512vl_shuf_f32x4_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9183extern rtx gen_avx512vl_shuf_f32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9184extern rtx gen_avx512f_shuf_f32x4_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9185extern rtx gen_avx512f_shuf_f32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9186extern rtx gen_avx512f_shuf_i32x4_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9187extern rtx gen_avx512f_shuf_i32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9188extern rtx gen_avx512f_pshufd_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9189extern rtx gen_avx512f_pshufd_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9190extern rtx gen_avx2_pshufd_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9191extern rtx gen_avx2_pshufd_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9192extern rtx gen_sse2_pshufd_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9193extern rtx gen_sse2_pshufd_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9194extern rtx gen_avx512bw_pshuflwv32hi_mask (rtx, rtx, rtx, rtx, rtx);
9195extern rtx gen_avx2_pshuflw_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9196extern rtx gen_avx2_pshuflw_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9197extern rtx gen_sse2_pshuflw_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9198extern rtx gen_sse2_pshuflw_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9199extern rtx gen_avx512bw_pshufhwv32hi_mask (rtx, rtx, rtx, rtx, rtx);
9200extern rtx gen_avx2_pshufhw_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9201extern rtx gen_avx2_pshufhw_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9202extern rtx gen_sse2_pshufhw_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9203extern rtx gen_sse2_pshufhw_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9204extern rtx gen_sse2_loadld (rtx, rtx, rtx);
9205extern rtx gen_vec_concatv2di (rtx, rtx, rtx);
9206extern rtx gen_avx512f_psadbw (rtx, rtx, rtx);
9207extern rtx gen_avx2_psadbw (rtx, rtx, rtx);
9208extern rtx gen_sse2_psadbw (rtx, rtx, rtx);
9209extern rtx gen_avx_movmskps256 (rtx, rtx);
9210extern rtx gen_sse_movmskps (rtx, rtx);
9211extern rtx gen_avx_movmskpd256 (rtx, rtx);
9212extern rtx gen_sse2_movmskpd (rtx, rtx);
9213extern rtx gen_avx2_pmovmskb (rtx, rtx);
9214extern rtx gen_sse2_pmovmskb (rtx, rtx);
9215extern rtx gen_sse_ldmxcsr (rtx);
9216extern rtx gen_sse_stmxcsr (rtx);
9217extern rtx gen_sse2_clflush (rtx);
9218extern rtx gen_sse3_mwait (rtx, rtx);
9219extern rtx gen_sse3_monitor_si (rtx, rtx, rtx);
9220extern rtx gen_sse3_monitor_di (rtx, rtx, rtx);
9221extern rtx gen_avx2_phaddwv16hi3 (rtx, rtx, rtx);
9222extern rtx gen_avx2_phaddswv16hi3 (rtx, rtx, rtx);
9223extern rtx gen_avx2_phsubwv16hi3 (rtx, rtx, rtx);
9224extern rtx gen_avx2_phsubswv16hi3 (rtx, rtx, rtx);
9225extern rtx gen_ssse3_phaddwv8hi3 (rtx, rtx, rtx);
9226extern rtx gen_ssse3_phaddswv8hi3 (rtx, rtx, rtx);
9227extern rtx gen_ssse3_phsubwv8hi3 (rtx, rtx, rtx);
9228extern rtx gen_ssse3_phsubswv8hi3 (rtx, rtx, rtx);
9229extern rtx gen_ssse3_phaddwv4hi3 (rtx, rtx, rtx);
9230extern rtx gen_ssse3_phaddswv4hi3 (rtx, rtx, rtx);
9231extern rtx gen_ssse3_phsubwv4hi3 (rtx, rtx, rtx);
9232extern rtx gen_ssse3_phsubswv4hi3 (rtx, rtx, rtx);
9233extern rtx gen_avx2_phadddv8si3 (rtx, rtx, rtx);
9234extern rtx gen_avx2_phsubdv8si3 (rtx, rtx, rtx);
9235extern rtx gen_ssse3_phadddv4si3 (rtx, rtx, rtx);
9236extern rtx gen_ssse3_phsubdv4si3 (rtx, rtx, rtx);
9237extern rtx gen_ssse3_phadddv2si3 (rtx, rtx, rtx);
9238extern rtx gen_ssse3_phsubdv2si3 (rtx, rtx, rtx);
9239extern rtx gen_avx2_pmaddubsw256 (rtx, rtx, rtx);
9240extern rtx gen_avx512bw_pmaddubsw512v8hi (rtx, rtx, rtx);
9241extern rtx gen_avx512bw_pmaddubsw512v8hi_mask (rtx, rtx, rtx, rtx, rtx);
9242extern rtx gen_avx512bw_pmaddubsw512v16hi (rtx, rtx, rtx);
9243extern rtx gen_avx512bw_pmaddubsw512v16hi_mask (rtx, rtx, rtx, rtx, rtx);
9244extern rtx gen_avx512bw_pmaddubsw512v32hi (rtx, rtx, rtx);
9245extern rtx gen_avx512bw_pmaddubsw512v32hi_mask (rtx, rtx, rtx, rtx, rtx);
9246extern rtx gen_avx512bw_umulhrswv32hi3 (rtx, rtx, rtx);
9247extern rtx gen_avx512bw_umulhrswv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
9248extern rtx gen_ssse3_pmaddubsw128 (rtx, rtx, rtx);
9249extern rtx gen_ssse3_pmaddubsw (rtx, rtx, rtx);
9250extern rtx gen_avx512bw_pshufbv64qi3 (rtx, rtx, rtx);
9251extern rtx gen_avx512bw_pshufbv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
9252extern rtx gen_avx2_pshufbv32qi3 (rtx, rtx, rtx);
9253extern rtx gen_avx2_pshufbv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
9254extern rtx gen_ssse3_pshufbv16qi3 (rtx, rtx, rtx);
9255extern rtx gen_ssse3_pshufbv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
9256extern rtx gen_ssse3_pshufbv8qi3 (rtx, rtx, rtx);
9257extern rtx gen_avx2_psignv32qi3 (rtx, rtx, rtx);
9258extern rtx gen_ssse3_psignv16qi3 (rtx, rtx, rtx);
9259extern rtx gen_avx2_psignv16hi3 (rtx, rtx, rtx);
9260extern rtx gen_ssse3_psignv8hi3 (rtx, rtx, rtx);
9261extern rtx gen_avx2_psignv8si3 (rtx, rtx, rtx);
9262extern rtx gen_ssse3_psignv4si3 (rtx, rtx, rtx);
9263extern rtx gen_ssse3_psignv8qi3 (rtx, rtx, rtx);
9264extern rtx gen_ssse3_psignv4hi3 (rtx, rtx, rtx);
9265extern rtx gen_ssse3_psignv2si3 (rtx, rtx, rtx);
9266extern rtx gen_avx512bw_palignrv64qi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
9267extern rtx gen_avx2_palignrv32qi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
9268extern rtx gen_ssse3_palignrv16qi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
9269extern rtx gen_avx512bw_palignrv4ti (rtx, rtx, rtx, rtx);
9270extern rtx gen_avx2_palignrv2ti (rtx, rtx, rtx, rtx);
9271extern rtx gen_ssse3_palignrti (rtx, rtx, rtx, rtx);
9272extern rtx gen_ssse3_palignrdi (rtx, rtx, rtx, rtx);
9273extern rtx gen_absv16si2_mask (rtx, rtx, rtx, rtx);
9274extern rtx gen_absv8si2_mask (rtx, rtx, rtx, rtx);
9275extern rtx gen_absv4si2_mask (rtx, rtx, rtx, rtx);
9276extern rtx gen_absv8di2_mask (rtx, rtx, rtx, rtx);
9277extern rtx gen_absv4di2_mask (rtx, rtx, rtx, rtx);
9278extern rtx gen_absv2di2_mask (rtx, rtx, rtx, rtx);
9279extern rtx gen_absv64qi2_mask (rtx, rtx, rtx, rtx);
9280extern rtx gen_absv16qi2_mask (rtx, rtx, rtx, rtx);
9281extern rtx gen_absv32qi2_mask (rtx, rtx, rtx, rtx);
9282extern rtx gen_absv32hi2_mask (rtx, rtx, rtx, rtx);
9283extern rtx gen_absv16hi2_mask (rtx, rtx, rtx, rtx);
9284extern rtx gen_absv8hi2_mask (rtx, rtx, rtx, rtx);
9285extern rtx gen_absv8qi2 (rtx, rtx);
9286extern rtx gen_absv4hi2 (rtx, rtx);
9287extern rtx gen_absv2si2 (rtx, rtx);
9288extern rtx gen_sse4a_movntsf (rtx, rtx);
9289extern rtx gen_sse4a_movntdf (rtx, rtx);
9290extern rtx gen_sse4a_vmmovntv4sf (rtx, rtx);
9291extern rtx gen_sse4a_vmmovntv2df (rtx, rtx);
9292extern rtx gen_sse4a_extrqi (rtx, rtx, rtx, rtx);
9293extern rtx gen_sse4a_extrq (rtx, rtx, rtx);
9294extern rtx gen_sse4a_insertqi (rtx, rtx, rtx, rtx, rtx);
9295extern rtx gen_sse4a_insertq (rtx, rtx, rtx);
9296extern rtx gen_avx_blendps256 (rtx, rtx, rtx, rtx);
9297extern rtx gen_sse4_1_blendps (rtx, rtx, rtx, rtx);
9298extern rtx gen_avx_blendpd256 (rtx, rtx, rtx, rtx);
9299extern rtx gen_sse4_1_blendpd (rtx, rtx, rtx, rtx);
9300extern rtx gen_avx_blendvps256 (rtx, rtx, rtx, rtx);
9301extern rtx gen_sse4_1_blendvps (rtx, rtx, rtx, rtx);
9302extern rtx gen_avx_blendvpd256 (rtx, rtx, rtx, rtx);
9303extern rtx gen_sse4_1_blendvpd (rtx, rtx, rtx, rtx);
9304extern rtx gen_avx_dpps256 (rtx, rtx, rtx, rtx);
9305extern rtx gen_sse4_1_dpps (rtx, rtx, rtx, rtx);
9306extern rtx gen_avx_dppd256 (rtx, rtx, rtx, rtx);
9307extern rtx gen_sse4_1_dppd (rtx, rtx, rtx, rtx);
9308extern rtx gen_avx512f_movntdqa (rtx, rtx);
9309extern rtx gen_avx2_movntdqa (rtx, rtx);
9310extern rtx gen_sse4_1_movntdqa (rtx, rtx);
9311extern rtx gen_avx2_mpsadbw (rtx, rtx, rtx, rtx);
9312extern rtx gen_sse4_1_mpsadbw (rtx, rtx, rtx, rtx);
9313extern rtx gen_avx512bw_packusdw (rtx, rtx, rtx);
9314extern rtx gen_avx512bw_packusdw_mask (rtx, rtx, rtx, rtx, rtx);
9315extern rtx gen_avx2_packusdw (rtx, rtx, rtx);
9316extern rtx gen_avx2_packusdw_mask (rtx, rtx, rtx, rtx, rtx);
9317extern rtx gen_sse4_1_packusdw (rtx, rtx, rtx);
9318extern rtx gen_sse4_1_packusdw_mask (rtx, rtx, rtx, rtx, rtx);
9319extern rtx gen_avx2_pblendvb (rtx, rtx, rtx, rtx);
9320extern rtx gen_sse4_1_pblendvb (rtx, rtx, rtx, rtx);
9321extern rtx gen_sse4_1_pblendw (rtx, rtx, rtx, rtx);
9322extern rtx gen_avx2_pblenddv8si (rtx, rtx, rtx, rtx);
9323extern rtx gen_avx2_pblenddv4si (rtx, rtx, rtx, rtx);
9324extern rtx gen_sse4_1_phminposuw (rtx, rtx);
9325extern rtx gen_avx2_sign_extendv16qiv16hi2 (rtx, rtx);
9326extern rtx gen_avx2_sign_extendv16qiv16hi2_mask (rtx, rtx, rtx, rtx);
9327extern rtx gen_avx2_zero_extendv16qiv16hi2 (rtx, rtx);
9328extern rtx gen_avx2_zero_extendv16qiv16hi2_mask (rtx, rtx, rtx, rtx);
9329extern rtx gen_avx512bw_sign_extendv32qiv32hi2 (rtx, rtx);
9330extern rtx gen_avx512bw_sign_extendv32qiv32hi2_mask (rtx, rtx, rtx, rtx);
9331extern rtx gen_avx512bw_zero_extendv32qiv32hi2 (rtx, rtx);
9332extern rtx gen_avx512bw_zero_extendv32qiv32hi2_mask (rtx, rtx, rtx, rtx);
9333extern rtx gen_sse4_1_sign_extendv8qiv8hi2 (rtx, rtx);
9334extern rtx gen_sse4_1_sign_extendv8qiv8hi2_mask (rtx, rtx, rtx, rtx);
9335extern rtx gen_sse4_1_zero_extendv8qiv8hi2 (rtx, rtx);
9336extern rtx gen_sse4_1_zero_extendv8qiv8hi2_mask (rtx, rtx, rtx, rtx);
9337extern rtx gen_avx512f_sign_extendv16qiv16si2_mask (rtx, rtx, rtx, rtx);
9338extern rtx gen_avx512f_zero_extendv16qiv16si2_mask (rtx, rtx, rtx, rtx);
9339extern rtx gen_avx2_sign_extendv8qiv8si2 (rtx, rtx);
9340extern rtx gen_avx2_sign_extendv8qiv8si2_mask (rtx, rtx, rtx, rtx);
9341extern rtx gen_avx2_zero_extendv8qiv8si2 (rtx, rtx);
9342extern rtx gen_avx2_zero_extendv8qiv8si2_mask (rtx, rtx, rtx, rtx);
9343extern rtx gen_sse4_1_sign_extendv4qiv4si2 (rtx, rtx);
9344extern rtx gen_sse4_1_sign_extendv4qiv4si2_mask (rtx, rtx, rtx, rtx);
9345extern rtx gen_sse4_1_zero_extendv4qiv4si2 (rtx, rtx);
9346extern rtx gen_sse4_1_zero_extendv4qiv4si2_mask (rtx, rtx, rtx, rtx);
9347extern rtx gen_avx512f_sign_extendv16hiv16si2 (rtx, rtx);
9348extern rtx gen_avx512f_sign_extendv16hiv16si2_mask (rtx, rtx, rtx, rtx);
9349extern rtx gen_avx512f_zero_extendv16hiv16si2 (rtx, rtx);
9350extern rtx gen_avx512f_zero_extendv16hiv16si2_mask (rtx, rtx, rtx, rtx);
9351extern rtx gen_avx2_sign_extendv8hiv8si2 (rtx, rtx);
9352extern rtx gen_avx2_sign_extendv8hiv8si2_mask (rtx, rtx, rtx, rtx);
9353extern rtx gen_avx2_zero_extendv8hiv8si2 (rtx, rtx);
9354extern rtx gen_avx2_zero_extendv8hiv8si2_mask (rtx, rtx, rtx, rtx);
9355extern rtx gen_sse4_1_sign_extendv4hiv4si2 (rtx, rtx);
9356extern rtx gen_sse4_1_sign_extendv4hiv4si2_mask (rtx, rtx, rtx, rtx);
9357extern rtx gen_sse4_1_zero_extendv4hiv4si2 (rtx, rtx);
9358extern rtx gen_sse4_1_zero_extendv4hiv4si2_mask (rtx, rtx, rtx, rtx);
9359extern rtx gen_avx512f_sign_extendv8qiv8di2 (rtx, rtx);
9360extern rtx gen_avx512f_sign_extendv8qiv8di2_mask (rtx, rtx, rtx, rtx);
9361extern rtx gen_avx512f_zero_extendv8qiv8di2 (rtx, rtx);
9362extern rtx gen_avx512f_zero_extendv8qiv8di2_mask (rtx, rtx, rtx, rtx);
9363extern rtx gen_avx2_sign_extendv4qiv4di2 (rtx, rtx);
9364extern rtx gen_avx2_sign_extendv4qiv4di2_mask (rtx, rtx, rtx, rtx);
9365extern rtx gen_avx2_zero_extendv4qiv4di2 (rtx, rtx);
9366extern rtx gen_avx2_zero_extendv4qiv4di2_mask (rtx, rtx, rtx, rtx);
9367extern rtx gen_sse4_1_sign_extendv2qiv2di2 (rtx, rtx);
9368extern rtx gen_sse4_1_sign_extendv2qiv2di2_mask (rtx, rtx, rtx, rtx);
9369extern rtx gen_sse4_1_zero_extendv2qiv2di2 (rtx, rtx);
9370extern rtx gen_sse4_1_zero_extendv2qiv2di2_mask (rtx, rtx, rtx, rtx);
9371extern rtx gen_avx512f_sign_extendv8hiv8di2 (rtx, rtx);
9372extern rtx gen_avx512f_sign_extendv8hiv8di2_mask (rtx, rtx, rtx, rtx);
9373extern rtx gen_avx512f_zero_extendv8hiv8di2 (rtx, rtx);
9374extern rtx gen_avx512f_zero_extendv8hiv8di2_mask (rtx, rtx, rtx, rtx);
9375extern rtx gen_avx2_sign_extendv4hiv4di2 (rtx, rtx);
9376extern rtx gen_avx2_sign_extendv4hiv4di2_mask (rtx, rtx, rtx, rtx);
9377extern rtx gen_avx2_zero_extendv4hiv4di2 (rtx, rtx);
9378extern rtx gen_avx2_zero_extendv4hiv4di2_mask (rtx, rtx, rtx, rtx);
9379extern rtx gen_sse4_1_sign_extendv2hiv2di2 (rtx, rtx);
9380extern rtx gen_sse4_1_sign_extendv2hiv2di2_mask (rtx, rtx, rtx, rtx);
9381extern rtx gen_sse4_1_zero_extendv2hiv2di2 (rtx, rtx);
9382extern rtx gen_sse4_1_zero_extendv2hiv2di2_mask (rtx, rtx, rtx, rtx);
9383extern rtx gen_avx512f_sign_extendv8siv8di2 (rtx, rtx);
9384extern rtx gen_avx512f_sign_extendv8siv8di2_mask (rtx, rtx, rtx, rtx);
9385extern rtx gen_avx512f_zero_extendv8siv8di2 (rtx, rtx);
9386extern rtx gen_avx512f_zero_extendv8siv8di2_mask (rtx, rtx, rtx, rtx);
9387extern rtx gen_avx2_sign_extendv4siv4di2 (rtx, rtx);
9388extern rtx gen_avx2_sign_extendv4siv4di2_mask (rtx, rtx, rtx, rtx);
9389extern rtx gen_avx2_zero_extendv4siv4di2 (rtx, rtx);
9390extern rtx gen_avx2_zero_extendv4siv4di2_mask (rtx, rtx, rtx, rtx);
9391extern rtx gen_sse4_1_sign_extendv2siv2di2 (rtx, rtx);
9392extern rtx gen_sse4_1_sign_extendv2siv2di2_mask (rtx, rtx, rtx, rtx);
9393extern rtx gen_sse4_1_zero_extendv2siv2di2 (rtx, rtx);
9394extern rtx gen_sse4_1_zero_extendv2siv2di2_mask (rtx, rtx, rtx, rtx);
9395extern rtx gen_avx_vtestps256 (rtx, rtx);
9396extern rtx gen_avx_vtestps (rtx, rtx);
9397extern rtx gen_avx_vtestpd256 (rtx, rtx);
9398extern rtx gen_avx_vtestpd (rtx, rtx);
9399extern rtx gen_sse4_1_ptestv16qi (rtx, rtx);
9400extern rtx gen_sse4_1_ptestv8hi (rtx, rtx);
9401extern rtx gen_sse4_1_ptestv4si (rtx, rtx);
9402extern rtx gen_sse4_1_ptestv2di (rtx, rtx);
9403extern rtx gen_sse4_1_ptestv4sf (rtx, rtx);
9404extern rtx gen_sse4_1_ptestv2df (rtx, rtx);
9405extern rtx gen_avx_ptestv32qi (rtx, rtx);
9406extern rtx gen_avx_ptestv16hi (rtx, rtx);
9407extern rtx gen_avx_ptestv8si (rtx, rtx);
9408extern rtx gen_avx_ptestv4di (rtx, rtx);
9409extern rtx gen_avx_ptestv8sf (rtx, rtx);
9410extern rtx gen_avx_ptestv4df (rtx, rtx);
9411extern rtx gen_ptesttf2 (rtx, rtx);
9412extern rtx gen_avx_roundps256 (rtx, rtx, rtx);
9413extern rtx gen_sse4_1_roundps (rtx, rtx, rtx);
9414extern rtx gen_avx_roundpd256 (rtx, rtx, rtx);
9415extern rtx gen_sse4_1_roundpd (rtx, rtx, rtx);
9416extern rtx gen_sse4_1_roundss (rtx, rtx, rtx, rtx);
9417extern rtx gen_sse4_1_roundsd (rtx, rtx, rtx, rtx);
9418extern rtx gen_sse4_2_pcmpestr (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9419extern rtx gen_sse4_2_pcmpestri (rtx, rtx, rtx, rtx, rtx, rtx);
9420extern rtx gen_sse4_2_pcmpestrm (rtx, rtx, rtx, rtx, rtx, rtx);
9421extern rtx gen_sse4_2_pcmpestr_cconly (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9422extern rtx gen_sse4_2_pcmpistr (rtx, rtx, rtx, rtx, rtx);
9423extern rtx gen_sse4_2_pcmpistri (rtx, rtx, rtx, rtx);
9424extern rtx gen_sse4_2_pcmpistrm (rtx, rtx, rtx, rtx);
9425extern rtx gen_sse4_2_pcmpistr_cconly (rtx, rtx, rtx, rtx, rtx);
9426extern rtx gen_avx512er_exp2v16sf (rtx, rtx);
9427extern rtx gen_avx512er_exp2v16sf_round (rtx, rtx, rtx);
9428extern rtx gen_avx512er_exp2v16sf_mask (rtx, rtx, rtx, rtx);
9429extern rtx gen_avx512er_exp2v16sf_mask_round (rtx, rtx, rtx, rtx, rtx);
9430extern rtx gen_avx512er_exp2v8df (rtx, rtx);
9431extern rtx gen_avx512er_exp2v8df_round (rtx, rtx, rtx);
9432extern rtx gen_avx512er_exp2v8df_mask (rtx, rtx, rtx, rtx);
9433extern rtx gen_avx512er_exp2v8df_mask_round (rtx, rtx, rtx, rtx, rtx);
9434extern rtx gen_avx512er_rcp28v16sf_mask (rtx, rtx, rtx, rtx);
9435extern rtx gen_avx512er_rcp28v16sf_mask_round (rtx, rtx, rtx, rtx, rtx);
9436extern rtx gen_avx512er_rcp28v8df_mask (rtx, rtx, rtx, rtx);
9437extern rtx gen_avx512er_rcp28v8df_mask_round (rtx, rtx, rtx, rtx, rtx);
9438extern rtx gen_avx512er_vmrcp28v4sf (rtx, rtx, rtx);
9439extern rtx gen_avx512er_vmrcp28v4sf_round (rtx, rtx, rtx, rtx);
9440extern rtx gen_avx512er_vmrcp28v2df (rtx, rtx, rtx);
9441extern rtx gen_avx512er_vmrcp28v2df_round (rtx, rtx, rtx, rtx);
9442extern rtx gen_avx512er_rsqrt28v16sf_mask (rtx, rtx, rtx, rtx);
9443extern rtx gen_avx512er_rsqrt28v16sf_mask_round (rtx, rtx, rtx, rtx, rtx);
9444extern rtx gen_avx512er_rsqrt28v8df_mask (rtx, rtx, rtx, rtx);
9445extern rtx gen_avx512er_rsqrt28v8df_mask_round (rtx, rtx, rtx, rtx, rtx);
9446extern rtx gen_avx512er_vmrsqrt28v4sf (rtx, rtx, rtx);
9447extern rtx gen_avx512er_vmrsqrt28v4sf_round (rtx, rtx, rtx, rtx);
9448extern rtx gen_avx512er_vmrsqrt28v2df (rtx, rtx, rtx);
9449extern rtx gen_avx512er_vmrsqrt28v2df_round (rtx, rtx, rtx, rtx);
9450extern rtx gen_xop_pmacsww (rtx, rtx, rtx, rtx);
9451extern rtx gen_xop_pmacssww (rtx, rtx, rtx, rtx);
9452extern rtx gen_xop_pmacsdd (rtx, rtx, rtx, rtx);
9453extern rtx gen_xop_pmacssdd (rtx, rtx, rtx, rtx);
9454extern rtx gen_xop_pmacsdql (rtx, rtx, rtx, rtx);
9455extern rtx gen_xop_pmacssdql (rtx, rtx, rtx, rtx);
9456extern rtx gen_xop_pmacsdqh (rtx, rtx, rtx, rtx);
9457extern rtx gen_xop_pmacssdqh (rtx, rtx, rtx, rtx);
9458extern rtx gen_xop_pmacswd (rtx, rtx, rtx, rtx);
9459extern rtx gen_xop_pmacsswd (rtx, rtx, rtx, rtx);
9460extern rtx gen_xop_pmadcswd (rtx, rtx, rtx, rtx);
9461extern rtx gen_xop_pmadcsswd (rtx, rtx, rtx, rtx);
9462extern rtx gen_xop_pcmov_v32qi256 (rtx, rtx, rtx, rtx);
9463extern rtx gen_xop_pcmov_v16qi (rtx, rtx, rtx, rtx);
9464extern rtx gen_xop_pcmov_v16hi256 (rtx, rtx, rtx, rtx);
9465extern rtx gen_xop_pcmov_v8hi (rtx, rtx, rtx, rtx);
9466extern rtx gen_xop_pcmov_v16si512 (rtx, rtx, rtx, rtx);
9467extern rtx gen_xop_pcmov_v8si256 (rtx, rtx, rtx, rtx);
9468extern rtx gen_xop_pcmov_v4si (rtx, rtx, rtx, rtx);
9469extern rtx gen_xop_pcmov_v8di512 (rtx, rtx, rtx, rtx);
9470extern rtx gen_xop_pcmov_v4di256 (rtx, rtx, rtx, rtx);
9471extern rtx gen_xop_pcmov_v2di (rtx, rtx, rtx, rtx);
9472extern rtx gen_xop_pcmov_v16sf512 (rtx, rtx, rtx, rtx);
9473extern rtx gen_xop_pcmov_v8sf256 (rtx, rtx, rtx, rtx);
9474extern rtx gen_xop_pcmov_v4sf (rtx, rtx, rtx, rtx);
9475extern rtx gen_xop_pcmov_v8df512 (rtx, rtx, rtx, rtx);
9476extern rtx gen_xop_pcmov_v4df256 (rtx, rtx, rtx, rtx);
9477extern rtx gen_xop_pcmov_v2df (rtx, rtx, rtx, rtx);
9478extern rtx gen_xop_phaddbw (rtx, rtx);
9479extern rtx gen_xop_phaddubw (rtx, rtx);
9480extern rtx gen_xop_phaddbd (rtx, rtx);
9481extern rtx gen_xop_phaddubd (rtx, rtx);
9482extern rtx gen_xop_phaddbq (rtx, rtx);
9483extern rtx gen_xop_phaddubq (rtx, rtx);
9484extern rtx gen_xop_phaddwd (rtx, rtx);
9485extern rtx gen_xop_phadduwd (rtx, rtx);
9486extern rtx gen_xop_phaddwq (rtx, rtx);
9487extern rtx gen_xop_phadduwq (rtx, rtx);
9488extern rtx gen_xop_phadddq (rtx, rtx);
9489extern rtx gen_xop_phaddudq (rtx, rtx);
9490extern rtx gen_xop_phsubbw (rtx, rtx);
9491extern rtx gen_xop_phsubwd (rtx, rtx);
9492extern rtx gen_xop_phsubdq (rtx, rtx);
9493extern rtx gen_xop_pperm (rtx, rtx, rtx, rtx);
9494extern rtx gen_xop_pperm_pack_v2di_v4si (rtx, rtx, rtx, rtx);
9495extern rtx gen_xop_pperm_pack_v4si_v8hi (rtx, rtx, rtx, rtx);
9496extern rtx gen_xop_pperm_pack_v8hi_v16qi (rtx, rtx, rtx, rtx);
9497extern rtx gen_xop_rotlv16qi3 (rtx, rtx, rtx);
9498extern rtx gen_xop_rotlv8hi3 (rtx, rtx, rtx);
9499extern rtx gen_xop_rotlv4si3 (rtx, rtx, rtx);
9500extern rtx gen_xop_rotlv2di3 (rtx, rtx, rtx);
9501extern rtx gen_xop_rotrv16qi3 (rtx, rtx, rtx);
9502extern rtx gen_xop_rotrv8hi3 (rtx, rtx, rtx);
9503extern rtx gen_xop_rotrv4si3 (rtx, rtx, rtx);
9504extern rtx gen_xop_rotrv2di3 (rtx, rtx, rtx);
9505extern rtx gen_xop_vrotlv16qi3 (rtx, rtx, rtx);
9506extern rtx gen_xop_vrotlv8hi3 (rtx, rtx, rtx);
9507extern rtx gen_xop_vrotlv4si3 (rtx, rtx, rtx);
9508extern rtx gen_xop_vrotlv2di3 (rtx, rtx, rtx);
9509extern rtx gen_xop_shav16qi3 (rtx, rtx, rtx);
9510extern rtx gen_xop_shav8hi3 (rtx, rtx, rtx);
9511extern rtx gen_xop_shav4si3 (rtx, rtx, rtx);
9512extern rtx gen_xop_shav2di3 (rtx, rtx, rtx);
9513extern rtx gen_xop_shlv16qi3 (rtx, rtx, rtx);
9514extern rtx gen_xop_shlv8hi3 (rtx, rtx, rtx);
9515extern rtx gen_xop_shlv4si3 (rtx, rtx, rtx);
9516extern rtx gen_xop_shlv2di3 (rtx, rtx, rtx);
9517extern rtx gen_xop_frczsf2 (rtx, rtx);
9518extern rtx gen_xop_frczdf2 (rtx, rtx);
9519extern rtx gen_xop_frczv4sf2 (rtx, rtx);
9520extern rtx gen_xop_frczv2df2 (rtx, rtx);
9521extern rtx gen_xop_frczv8sf2 (rtx, rtx);
9522extern rtx gen_xop_frczv4df2 (rtx, rtx);
9523extern rtx gen_xop_maskcmpv16qi3 (rtx, rtx, rtx, rtx);
9524extern rtx gen_xop_maskcmpv8hi3 (rtx, rtx, rtx, rtx);
9525extern rtx gen_xop_maskcmpv4si3 (rtx, rtx, rtx, rtx);
9526extern rtx gen_xop_maskcmpv2di3 (rtx, rtx, rtx, rtx);
9527extern rtx gen_xop_maskcmp_unsv16qi3 (rtx, rtx, rtx, rtx);
9528extern rtx gen_xop_maskcmp_unsv8hi3 (rtx, rtx, rtx, rtx);
9529extern rtx gen_xop_maskcmp_unsv4si3 (rtx, rtx, rtx, rtx);
9530extern rtx gen_xop_maskcmp_unsv2di3 (rtx, rtx, rtx, rtx);
9531extern rtx gen_xop_maskcmp_uns2v16qi3 (rtx, rtx, rtx, rtx);
9532extern rtx gen_xop_maskcmp_uns2v8hi3 (rtx, rtx, rtx, rtx);
9533extern rtx gen_xop_maskcmp_uns2v4si3 (rtx, rtx, rtx, rtx);
9534extern rtx gen_xop_maskcmp_uns2v2di3 (rtx, rtx, rtx, rtx);
9535extern rtx gen_xop_pcom_tfv16qi3 (rtx, rtx, rtx, rtx);
9536extern rtx gen_xop_pcom_tfv8hi3 (rtx, rtx, rtx, rtx);
9537extern rtx gen_xop_pcom_tfv4si3 (rtx, rtx, rtx, rtx);
9538extern rtx gen_xop_pcom_tfv2di3 (rtx, rtx, rtx, rtx);
9539extern rtx gen_xop_vpermil2v8sf3 (rtx, rtx, rtx, rtx, rtx);
9540extern rtx gen_xop_vpermil2v4sf3 (rtx, rtx, rtx, rtx, rtx);
9541extern rtx gen_xop_vpermil2v4df3 (rtx, rtx, rtx, rtx, rtx);
9542extern rtx gen_xop_vpermil2v2df3 (rtx, rtx, rtx, rtx, rtx);
9543extern rtx gen_aesenc (rtx, rtx, rtx);
9544extern rtx gen_aesenclast (rtx, rtx, rtx);
9545extern rtx gen_aesdec (rtx, rtx, rtx);
9546extern rtx gen_aesdeclast (rtx, rtx, rtx);
9547extern rtx gen_aesimc (rtx, rtx);
9548extern rtx gen_aeskeygenassist (rtx, rtx, rtx);
9549extern rtx gen_pclmulqdq (rtx, rtx, rtx, rtx);
9550extern rtx gen_avx_vzeroupper (void);
9551extern rtx gen_avx2_pbroadcastv16si (rtx, rtx);
9552extern rtx gen_avx2_pbroadcastv8di (rtx, rtx);
9553extern rtx gen_avx2_pbroadcastv64qi (rtx, rtx);
9554extern rtx gen_avx2_pbroadcastv32qi (rtx, rtx);
9555extern rtx gen_avx2_pbroadcastv16qi (rtx, rtx);
9556extern rtx gen_avx2_pbroadcastv32hi (rtx, rtx);
9557extern rtx gen_avx2_pbroadcastv16hi (rtx, rtx);
9558extern rtx gen_avx2_pbroadcastv8hi (rtx, rtx);
9559extern rtx gen_avx2_pbroadcastv8si (rtx, rtx);
9560extern rtx gen_avx2_pbroadcastv4si (rtx, rtx);
9561extern rtx gen_avx2_pbroadcastv4di (rtx, rtx);
9562extern rtx gen_avx2_pbroadcastv2di (rtx, rtx);
9563extern rtx gen_avx2_pbroadcastv32qi_1 (rtx, rtx);
9564extern rtx gen_avx2_pbroadcastv16hi_1 (rtx, rtx);
9565extern rtx gen_avx2_pbroadcastv8si_1 (rtx, rtx);
9566extern rtx gen_avx2_pbroadcastv4di_1 (rtx, rtx);
9567extern rtx gen_avx2_permvarv8si (rtx, rtx, rtx);
9568extern rtx gen_avx2_permvarv8si_mask (rtx, rtx, rtx, rtx, rtx);
9569extern rtx gen_avx2_permvarv8sf (rtx, rtx, rtx);
9570extern rtx gen_avx2_permvarv8sf_mask (rtx, rtx, rtx, rtx, rtx);
9571extern rtx gen_avx512f_permvarv16si (rtx, rtx, rtx);
9572extern rtx gen_avx512f_permvarv16si_mask (rtx, rtx, rtx, rtx, rtx);
9573extern rtx gen_avx512f_permvarv16sf (rtx, rtx, rtx);
9574extern rtx gen_avx512f_permvarv16sf_mask (rtx, rtx, rtx, rtx, rtx);
9575extern rtx gen_avx512f_permvarv8di (rtx, rtx, rtx);
9576extern rtx gen_avx512f_permvarv8di_mask (rtx, rtx, rtx, rtx, rtx);
9577extern rtx gen_avx512f_permvarv8df (rtx, rtx, rtx);
9578extern rtx gen_avx512f_permvarv8df_mask (rtx, rtx, rtx, rtx, rtx);
9579extern rtx gen_avx2_permvarv4di (rtx, rtx, rtx);
9580extern rtx gen_avx2_permvarv4di_mask (rtx, rtx, rtx, rtx, rtx);
9581extern rtx gen_avx2_permvarv4df (rtx, rtx, rtx);
9582extern rtx gen_avx2_permvarv4df_mask (rtx, rtx, rtx, rtx, rtx);
9583extern rtx gen_avx512bw_permvarv64qi (rtx, rtx, rtx);
9584extern rtx gen_avx512bw_permvarv64qi_mask (rtx, rtx, rtx, rtx, rtx);
9585extern rtx gen_avx512vl_permvarv16qi (rtx, rtx, rtx);
9586extern rtx gen_avx512vl_permvarv16qi_mask (rtx, rtx, rtx, rtx, rtx);
9587extern rtx gen_avx512vl_permvarv32qi (rtx, rtx, rtx);
9588extern rtx gen_avx512vl_permvarv32qi_mask (rtx, rtx, rtx, rtx, rtx);
9589extern rtx gen_avx512vl_permvarv8hi (rtx, rtx, rtx);
9590extern rtx gen_avx512vl_permvarv8hi_mask (rtx, rtx, rtx, rtx, rtx);
9591extern rtx gen_avx512vl_permvarv16hi (rtx, rtx, rtx);
9592extern rtx gen_avx512vl_permvarv16hi_mask (rtx, rtx, rtx, rtx, rtx);
9593extern rtx gen_avx512bw_permvarv32hi (rtx, rtx, rtx);
9594extern rtx gen_avx512bw_permvarv32hi_mask (rtx, rtx, rtx, rtx, rtx);
9595extern rtx gen_avx2_permv4di_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9596extern rtx gen_avx2_permv4di_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9597extern rtx gen_avx2_permv4df_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9598extern rtx gen_avx2_permv4df_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9599extern rtx gen_avx512f_permv8df_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9600extern rtx gen_avx512f_permv8df_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9601extern rtx gen_avx512f_permv8di_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9602extern rtx gen_avx512f_permv8di_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9603extern rtx gen_avx2_permv2ti (rtx, rtx, rtx, rtx);
9604extern rtx gen_avx2_vec_dupv4df (rtx, rtx);
9605extern rtx gen_avx512f_vec_dupv16si_1 (rtx, rtx);
9606extern rtx gen_avx512f_vec_dupv8di_1 (rtx, rtx);
9607extern rtx gen_avx512bw_vec_dupv32hi_1 (rtx, rtx);
9608extern rtx gen_avx512bw_vec_dupv64qi_1 (rtx, rtx);
9609extern rtx gen_avx512f_vec_dupv16si (rtx, rtx);
9610extern rtx gen_avx512f_vec_dupv16si_mask (rtx, rtx, rtx, rtx);
9611extern rtx gen_avx512vl_vec_dupv8si (rtx, rtx);
9612extern rtx gen_avx512vl_vec_dupv8si_mask (rtx, rtx, rtx, rtx);
9613extern rtx gen_avx512vl_vec_dupv4si (rtx, rtx);
9614extern rtx gen_avx512vl_vec_dupv4si_mask (rtx, rtx, rtx, rtx);
9615extern rtx gen_avx512f_vec_dupv8di (rtx, rtx);
9616extern rtx gen_avx512f_vec_dupv8di_mask (rtx, rtx, rtx, rtx);
9617extern rtx gen_avx512vl_vec_dupv4di (rtx, rtx);
9618extern rtx gen_avx512vl_vec_dupv4di_mask (rtx, rtx, rtx, rtx);
9619extern rtx gen_avx512vl_vec_dupv2di (rtx, rtx);
9620extern rtx gen_avx512vl_vec_dupv2di_mask (rtx, rtx, rtx, rtx);
9621extern rtx gen_avx512f_vec_dupv16sf (rtx, rtx);
9622extern rtx gen_avx512f_vec_dupv16sf_mask (rtx, rtx, rtx, rtx);
9623extern rtx gen_avx512vl_vec_dupv8sf (rtx, rtx);
9624extern rtx gen_avx512vl_vec_dupv8sf_mask (rtx, rtx, rtx, rtx);
9625extern rtx gen_avx512vl_vec_dupv4sf (rtx, rtx);
9626extern rtx gen_avx512vl_vec_dupv4sf_mask (rtx, rtx, rtx, rtx);
9627extern rtx gen_avx512f_vec_dupv8df (rtx, rtx);
9628extern rtx gen_avx512f_vec_dupv8df_mask (rtx, rtx, rtx, rtx);
9629extern rtx gen_avx512vl_vec_dupv4df (rtx, rtx);
9630extern rtx gen_avx512vl_vec_dupv4df_mask (rtx, rtx, rtx, rtx);
9631extern rtx gen_avx512vl_vec_dupv2df (rtx, rtx);
9632extern rtx gen_avx512vl_vec_dupv2df_mask (rtx, rtx, rtx, rtx);
9633extern rtx gen_avx512bw_vec_dupv64qi (rtx, rtx);
9634extern rtx gen_avx512bw_vec_dupv64qi_mask (rtx, rtx, rtx, rtx);
9635extern rtx gen_avx512vl_vec_dupv16qi (rtx, rtx);
9636extern rtx gen_avx512vl_vec_dupv16qi_mask (rtx, rtx, rtx, rtx);
9637extern rtx gen_avx512vl_vec_dupv32qi (rtx, rtx);
9638extern rtx gen_avx512vl_vec_dupv32qi_mask (rtx, rtx, rtx, rtx);
9639extern rtx gen_avx512bw_vec_dupv32hi (rtx, rtx);
9640extern rtx gen_avx512bw_vec_dupv32hi_mask (rtx, rtx, rtx, rtx);
9641extern rtx gen_avx512vl_vec_dupv16hi (rtx, rtx);
9642extern rtx gen_avx512vl_vec_dupv16hi_mask (rtx, rtx, rtx, rtx);
9643extern rtx gen_avx512vl_vec_dupv8hi (rtx, rtx);
9644extern rtx gen_avx512vl_vec_dupv8hi_mask (rtx, rtx, rtx, rtx);
9645extern rtx gen_avx512f_broadcastv16sf_mask (rtx, rtx, rtx, rtx);
9646extern rtx gen_avx512f_broadcastv16si_mask (rtx, rtx, rtx, rtx);
9647extern rtx gen_avx512f_broadcastv8df_mask (rtx, rtx, rtx, rtx);
9648extern rtx gen_avx512f_broadcastv8di_mask (rtx, rtx, rtx, rtx);
9649extern rtx gen_avx512bw_vec_dup_gprv64qi_mask (rtx, rtx, rtx, rtx);
9650extern rtx gen_avx512vl_vec_dup_gprv16qi_mask (rtx, rtx, rtx, rtx);
9651extern rtx gen_avx512vl_vec_dup_gprv32qi_mask (rtx, rtx, rtx, rtx);
9652extern rtx gen_avx512bw_vec_dup_gprv32hi_mask (rtx, rtx, rtx, rtx);
9653extern rtx gen_avx512vl_vec_dup_gprv16hi_mask (rtx, rtx, rtx, rtx);
9654extern rtx gen_avx512vl_vec_dup_gprv8hi_mask (rtx, rtx, rtx, rtx);
9655extern rtx gen_avx512f_vec_dup_gprv16si_mask (rtx, rtx, rtx, rtx);
9656extern rtx gen_avx512vl_vec_dup_gprv8si_mask (rtx, rtx, rtx, rtx);
9657extern rtx gen_avx512vl_vec_dup_gprv4si_mask (rtx, rtx, rtx, rtx);
9658extern rtx gen_avx512f_vec_dup_gprv8di_mask (rtx, rtx, rtx, rtx);
9659extern rtx gen_avx512vl_vec_dup_gprv4di_mask (rtx, rtx, rtx, rtx);
9660extern rtx gen_avx512vl_vec_dup_gprv2di_mask (rtx, rtx, rtx, rtx);
9661extern rtx gen_avx512f_vec_dup_gprv16sf_mask (rtx, rtx, rtx, rtx);
9662extern rtx gen_avx512vl_vec_dup_gprv8sf_mask (rtx, rtx, rtx, rtx);
9663extern rtx gen_avx512vl_vec_dup_gprv4sf_mask (rtx, rtx, rtx, rtx);
9664extern rtx gen_avx512f_vec_dup_gprv8df_mask (rtx, rtx, rtx, rtx);
9665extern rtx gen_avx512vl_vec_dup_gprv4df_mask (rtx, rtx, rtx, rtx);
9666extern rtx gen_avx512vl_vec_dup_gprv2df_mask (rtx, rtx, rtx, rtx);
9667extern rtx gen_vec_dupv4sf (rtx, rtx);
9668extern rtx gen_avx2_vbroadcasti128_v32qi (rtx, rtx);
9669extern rtx gen_avx2_vbroadcasti128_v16hi (rtx, rtx);
9670extern rtx gen_avx2_vbroadcasti128_v8si (rtx, rtx);
9671extern rtx gen_avx2_vbroadcasti128_v4di (rtx, rtx);
9672extern rtx gen_vec_dupv8si (rtx, rtx);
9673extern rtx gen_vec_dupv8sf (rtx, rtx);
9674extern rtx gen_vec_dupv4di (rtx, rtx);
9675extern rtx gen_vec_dupv4df (rtx, rtx);
9676extern rtx gen_avx_vbroadcastf128_v32qi (rtx, rtx);
9677extern rtx gen_avx_vbroadcastf128_v16hi (rtx, rtx);
9678extern rtx gen_avx_vbroadcastf128_v8si (rtx, rtx);
9679extern rtx gen_avx_vbroadcastf128_v4di (rtx, rtx);
9680extern rtx gen_avx_vbroadcastf128_v8sf (rtx, rtx);
9681extern rtx gen_avx_vbroadcastf128_v4df (rtx, rtx);
9682extern rtx gen_avx512dq_broadcastv16si_mask (rtx, rtx, rtx, rtx);
9683extern rtx gen_avx512dq_broadcastv8si_mask (rtx, rtx, rtx, rtx);
9684extern rtx gen_avx512dq_broadcastv4si_mask (rtx, rtx, rtx, rtx);
9685extern rtx gen_avx512dq_broadcastv16sf_mask (rtx, rtx, rtx, rtx);
9686extern rtx gen_avx512dq_broadcastv8sf_mask (rtx, rtx, rtx, rtx);
9687extern rtx gen_avx512vl_broadcastv8si_mask_1 (rtx, rtx, rtx, rtx);
9688extern rtx gen_avx512vl_broadcastv8sf_mask_1 (rtx, rtx, rtx, rtx);
9689extern rtx gen_avx512dq_broadcastv16sf_mask_1 (rtx, rtx, rtx, rtx);
9690extern rtx gen_avx512dq_broadcastv16si_mask_1 (rtx, rtx, rtx, rtx);
9691extern rtx gen_avx512dq_broadcastv8di_mask_1 (rtx, rtx, rtx, rtx);
9692extern rtx gen_avx512dq_broadcastv8df_mask_1 (rtx, rtx, rtx, rtx);
9693extern rtx gen_avx512dq_broadcastv4di_mask_1 (rtx, rtx, rtx, rtx);
9694extern rtx gen_avx512dq_broadcastv4df_mask_1 (rtx, rtx, rtx, rtx);
9695extern rtx gen_avx512cd_maskb_vec_dupv8di (rtx, rtx);
9696extern rtx gen_avx512cd_maskb_vec_dupv4di (rtx, rtx);
9697extern rtx gen_avx512cd_maskb_vec_dupv2di (rtx, rtx);
9698extern rtx gen_avx512cd_maskw_vec_dupv16si (rtx, rtx);
9699extern rtx gen_avx512cd_maskw_vec_dupv8si (rtx, rtx);
9700extern rtx gen_avx512cd_maskw_vec_dupv4si (rtx, rtx);
9701extern rtx gen_avx512f_vpermilvarv16sf3 (rtx, rtx, rtx);
9702extern rtx gen_avx512f_vpermilvarv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
9703extern rtx gen_avx_vpermilvarv8sf3 (rtx, rtx, rtx);
9704extern rtx gen_avx_vpermilvarv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
9705extern rtx gen_avx_vpermilvarv4sf3 (rtx, rtx, rtx);
9706extern rtx gen_avx_vpermilvarv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
9707extern rtx gen_avx512f_vpermilvarv8df3 (rtx, rtx, rtx);
9708extern rtx gen_avx512f_vpermilvarv8df3_mask (rtx, rtx, rtx, rtx, rtx);
9709extern rtx gen_avx_vpermilvarv4df3 (rtx, rtx, rtx);
9710extern rtx gen_avx_vpermilvarv4df3_mask (rtx, rtx, rtx, rtx, rtx);
9711extern rtx gen_avx_vpermilvarv2df3 (rtx, rtx, rtx);
9712extern rtx gen_avx_vpermilvarv2df3_mask (rtx, rtx, rtx, rtx, rtx);
9713extern rtx gen_avx512f_vpermi2varv16si3 (rtx, rtx, rtx, rtx);
9714extern rtx gen_avx512f_vpermi2varv16si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9715extern rtx gen_avx512f_vpermi2varv16sf3 (rtx, rtx, rtx, rtx);
9716extern rtx gen_avx512f_vpermi2varv16sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9717extern rtx gen_avx512f_vpermi2varv8di3 (rtx, rtx, rtx, rtx);
9718extern rtx gen_avx512f_vpermi2varv8di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9719extern rtx gen_avx512f_vpermi2varv8df3 (rtx, rtx, rtx, rtx);
9720extern rtx gen_avx512f_vpermi2varv8df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9721extern rtx gen_avx512vl_vpermi2varv8si3 (rtx, rtx, rtx, rtx);
9722extern rtx gen_avx512vl_vpermi2varv8si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9723extern rtx gen_avx512vl_vpermi2varv8sf3 (rtx, rtx, rtx, rtx);
9724extern rtx gen_avx512vl_vpermi2varv8sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9725extern rtx gen_avx512vl_vpermi2varv4di3 (rtx, rtx, rtx, rtx);
9726extern rtx gen_avx512vl_vpermi2varv4di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9727extern rtx gen_avx512vl_vpermi2varv4df3 (rtx, rtx, rtx, rtx);
9728extern rtx gen_avx512vl_vpermi2varv4df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9729extern rtx gen_avx512vl_vpermi2varv4si3 (rtx, rtx, rtx, rtx);
9730extern rtx gen_avx512vl_vpermi2varv4si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9731extern rtx gen_avx512vl_vpermi2varv4sf3 (rtx, rtx, rtx, rtx);
9732extern rtx gen_avx512vl_vpermi2varv4sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9733extern rtx gen_avx512vl_vpermi2varv2di3 (rtx, rtx, rtx, rtx);
9734extern rtx gen_avx512vl_vpermi2varv2di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9735extern rtx gen_avx512vl_vpermi2varv2df3 (rtx, rtx, rtx, rtx);
9736extern rtx gen_avx512vl_vpermi2varv2df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9737extern rtx gen_avx512bw_vpermi2varv64qi3 (rtx, rtx, rtx, rtx);
9738extern rtx gen_avx512bw_vpermi2varv64qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9739extern rtx gen_avx512vl_vpermi2varv16qi3 (rtx, rtx, rtx, rtx);
9740extern rtx gen_avx512vl_vpermi2varv16qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9741extern rtx gen_avx512vl_vpermi2varv32qi3 (rtx, rtx, rtx, rtx);
9742extern rtx gen_avx512vl_vpermi2varv32qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9743extern rtx gen_avx512vl_vpermi2varv8hi3 (rtx, rtx, rtx, rtx);
9744extern rtx gen_avx512vl_vpermi2varv8hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9745extern rtx gen_avx512vl_vpermi2varv16hi3 (rtx, rtx, rtx, rtx);
9746extern rtx gen_avx512vl_vpermi2varv16hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9747extern rtx gen_avx512bw_vpermi2varv32hi3 (rtx, rtx, rtx, rtx);
9748extern rtx gen_avx512bw_vpermi2varv32hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9749extern rtx gen_avx512f_vpermi2varv16si3_mask (rtx, rtx, rtx, rtx, rtx);
9750extern rtx gen_avx512f_vpermi2varv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
9751extern rtx gen_avx512f_vpermi2varv8di3_mask (rtx, rtx, rtx, rtx, rtx);
9752extern rtx gen_avx512f_vpermi2varv8df3_mask (rtx, rtx, rtx, rtx, rtx);
9753extern rtx gen_avx512vl_vpermi2varv8si3_mask (rtx, rtx, rtx, rtx, rtx);
9754extern rtx gen_avx512vl_vpermi2varv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
9755extern rtx gen_avx512vl_vpermi2varv4di3_mask (rtx, rtx, rtx, rtx, rtx);
9756extern rtx gen_avx512vl_vpermi2varv4df3_mask (rtx, rtx, rtx, rtx, rtx);
9757extern rtx gen_avx512vl_vpermi2varv4si3_mask (rtx, rtx, rtx, rtx, rtx);
9758extern rtx gen_avx512vl_vpermi2varv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
9759extern rtx gen_avx512vl_vpermi2varv2di3_mask (rtx, rtx, rtx, rtx, rtx);
9760extern rtx gen_avx512vl_vpermi2varv2df3_mask (rtx, rtx, rtx, rtx, rtx);
9761extern rtx gen_avx512bw_vpermi2varv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
9762extern rtx gen_avx512vl_vpermi2varv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
9763extern rtx gen_avx512vl_vpermi2varv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
9764extern rtx gen_avx512vl_vpermi2varv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
9765extern rtx gen_avx512vl_vpermi2varv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
9766extern rtx gen_avx512bw_vpermi2varv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
9767extern rtx gen_avx512f_vpermt2varv16si3 (rtx, rtx, rtx, rtx);
9768extern rtx gen_avx512f_vpermt2varv16si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9769extern rtx gen_avx512f_vpermt2varv16sf3 (rtx, rtx, rtx, rtx);
9770extern rtx gen_avx512f_vpermt2varv16sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9771extern rtx gen_avx512f_vpermt2varv8di3 (rtx, rtx, rtx, rtx);
9772extern rtx gen_avx512f_vpermt2varv8di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9773extern rtx gen_avx512f_vpermt2varv8df3 (rtx, rtx, rtx, rtx);
9774extern rtx gen_avx512f_vpermt2varv8df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9775extern rtx gen_avx512vl_vpermt2varv8si3 (rtx, rtx, rtx, rtx);
9776extern rtx gen_avx512vl_vpermt2varv8si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9777extern rtx gen_avx512vl_vpermt2varv8sf3 (rtx, rtx, rtx, rtx);
9778extern rtx gen_avx512vl_vpermt2varv8sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9779extern rtx gen_avx512vl_vpermt2varv4di3 (rtx, rtx, rtx, rtx);
9780extern rtx gen_avx512vl_vpermt2varv4di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9781extern rtx gen_avx512vl_vpermt2varv4df3 (rtx, rtx, rtx, rtx);
9782extern rtx gen_avx512vl_vpermt2varv4df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9783extern rtx gen_avx512vl_vpermt2varv4si3 (rtx, rtx, rtx, rtx);
9784extern rtx gen_avx512vl_vpermt2varv4si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9785extern rtx gen_avx512vl_vpermt2varv4sf3 (rtx, rtx, rtx, rtx);
9786extern rtx gen_avx512vl_vpermt2varv4sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9787extern rtx gen_avx512vl_vpermt2varv2di3 (rtx, rtx, rtx, rtx);
9788extern rtx gen_avx512vl_vpermt2varv2di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9789extern rtx gen_avx512vl_vpermt2varv2df3 (rtx, rtx, rtx, rtx);
9790extern rtx gen_avx512vl_vpermt2varv2df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9791extern rtx gen_avx512bw_vpermt2varv64qi3 (rtx, rtx, rtx, rtx);
9792extern rtx gen_avx512bw_vpermt2varv64qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9793extern rtx gen_avx512vl_vpermt2varv16qi3 (rtx, rtx, rtx, rtx);
9794extern rtx gen_avx512vl_vpermt2varv16qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9795extern rtx gen_avx512vl_vpermt2varv32qi3 (rtx, rtx, rtx, rtx);
9796extern rtx gen_avx512vl_vpermt2varv32qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9797extern rtx gen_avx512vl_vpermt2varv8hi3 (rtx, rtx, rtx, rtx);
9798extern rtx gen_avx512vl_vpermt2varv8hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9799extern rtx gen_avx512vl_vpermt2varv16hi3 (rtx, rtx, rtx, rtx);
9800extern rtx gen_avx512vl_vpermt2varv16hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9801extern rtx gen_avx512bw_vpermt2varv32hi3 (rtx, rtx, rtx, rtx);
9802extern rtx gen_avx512bw_vpermt2varv32hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
9803extern rtx gen_avx512f_vpermt2varv16si3_mask (rtx, rtx, rtx, rtx, rtx);
9804extern rtx gen_avx512f_vpermt2varv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
9805extern rtx gen_avx512f_vpermt2varv8di3_mask (rtx, rtx, rtx, rtx, rtx);
9806extern rtx gen_avx512f_vpermt2varv8df3_mask (rtx, rtx, rtx, rtx, rtx);
9807extern rtx gen_avx512vl_vpermt2varv8si3_mask (rtx, rtx, rtx, rtx, rtx);
9808extern rtx gen_avx512vl_vpermt2varv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
9809extern rtx gen_avx512vl_vpermt2varv4di3_mask (rtx, rtx, rtx, rtx, rtx);
9810extern rtx gen_avx512vl_vpermt2varv4df3_mask (rtx, rtx, rtx, rtx, rtx);
9811extern rtx gen_avx512vl_vpermt2varv4si3_mask (rtx, rtx, rtx, rtx, rtx);
9812extern rtx gen_avx512vl_vpermt2varv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
9813extern rtx gen_avx512vl_vpermt2varv2di3_mask (rtx, rtx, rtx, rtx, rtx);
9814extern rtx gen_avx512vl_vpermt2varv2df3_mask (rtx, rtx, rtx, rtx, rtx);
9815extern rtx gen_avx512bw_vpermt2varv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
9816extern rtx gen_avx512vl_vpermt2varv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
9817extern rtx gen_avx512vl_vpermt2varv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
9818extern rtx gen_avx512vl_vpermt2varv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
9819extern rtx gen_avx512vl_vpermt2varv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
9820extern rtx gen_avx512bw_vpermt2varv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
9821extern rtx gen_vec_set_lo_v4di (rtx, rtx, rtx);
9822extern rtx gen_vec_set_lo_v4di_mask (rtx, rtx, rtx, rtx, rtx);
9823extern rtx gen_vec_set_lo_v4df (rtx, rtx, rtx);
9824extern rtx gen_vec_set_lo_v4df_mask (rtx, rtx, rtx, rtx, rtx);
9825extern rtx gen_vec_set_hi_v4di (rtx, rtx, rtx);
9826extern rtx gen_vec_set_hi_v4di_mask (rtx, rtx, rtx, rtx, rtx);
9827extern rtx gen_vec_set_hi_v4df (rtx, rtx, rtx);
9828extern rtx gen_vec_set_hi_v4df_mask (rtx, rtx, rtx, rtx, rtx);
9829extern rtx gen_vec_set_lo_v8si (rtx, rtx, rtx);
9830extern rtx gen_vec_set_lo_v8si_mask (rtx, rtx, rtx, rtx, rtx);
9831extern rtx gen_vec_set_lo_v8sf (rtx, rtx, rtx);
9832extern rtx gen_vec_set_lo_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
9833extern rtx gen_vec_set_hi_v8si (rtx, rtx, rtx);
9834extern rtx gen_vec_set_hi_v8si_mask (rtx, rtx, rtx, rtx, rtx);
9835extern rtx gen_vec_set_hi_v8sf (rtx, rtx, rtx);
9836extern rtx gen_vec_set_hi_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
9837extern rtx gen_vec_set_lo_v16hi (rtx, rtx, rtx);
9838extern rtx gen_vec_set_hi_v16hi (rtx, rtx, rtx);
9839extern rtx gen_vec_set_lo_v32qi (rtx, rtx, rtx);
9840extern rtx gen_vec_set_hi_v32qi (rtx, rtx, rtx);
9841extern rtx gen_avx_maskloadps (rtx, rtx, rtx);
9842extern rtx gen_avx_maskloadpd (rtx, rtx, rtx);
9843extern rtx gen_avx_maskloadps256 (rtx, rtx, rtx);
9844extern rtx gen_avx_maskloadpd256 (rtx, rtx, rtx);
9845extern rtx gen_avx2_maskloadd (rtx, rtx, rtx);
9846extern rtx gen_avx2_maskloadq (rtx, rtx, rtx);
9847extern rtx gen_avx2_maskloadd256 (rtx, rtx, rtx);
9848extern rtx gen_avx2_maskloadq256 (rtx, rtx, rtx);
9849extern rtx gen_avx_maskstoreps (rtx, rtx, rtx);
9850extern rtx gen_avx_maskstorepd (rtx, rtx, rtx);
9851extern rtx gen_avx_maskstoreps256 (rtx, rtx, rtx);
9852extern rtx gen_avx_maskstorepd256 (rtx, rtx, rtx);
9853extern rtx gen_avx2_maskstored (rtx, rtx, rtx);
9854extern rtx gen_avx2_maskstoreq (rtx, rtx, rtx);
9855extern rtx gen_avx2_maskstored256 (rtx, rtx, rtx);
9856extern rtx gen_avx2_maskstoreq256 (rtx, rtx, rtx);
9857extern rtx gen_avx_si256_si (rtx, rtx);
9858extern rtx gen_avx_ps256_ps (rtx, rtx);
9859extern rtx gen_avx_pd256_pd (rtx, rtx);
9860extern rtx gen_avx2_ashrvv4si (rtx, rtx, rtx);
9861extern rtx gen_avx2_ashrvv4si_mask (rtx, rtx, rtx, rtx, rtx);
9862extern rtx gen_avx2_ashrvv8si (rtx, rtx, rtx);
9863extern rtx gen_avx2_ashrvv8si_mask (rtx, rtx, rtx, rtx, rtx);
9864extern rtx gen_avx512f_ashrvv16si (rtx, rtx, rtx);
9865extern rtx gen_avx512f_ashrvv16si_mask (rtx, rtx, rtx, rtx, rtx);
9866extern rtx gen_avx2_ashrvv2di (rtx, rtx, rtx);
9867extern rtx gen_avx2_ashrvv2di_mask (rtx, rtx, rtx, rtx, rtx);
9868extern rtx gen_avx2_ashrvv4di (rtx, rtx, rtx);
9869extern rtx gen_avx2_ashrvv4di_mask (rtx, rtx, rtx, rtx, rtx);
9870extern rtx gen_avx512f_ashrvv8di (rtx, rtx, rtx);
9871extern rtx gen_avx512f_ashrvv8di_mask (rtx, rtx, rtx, rtx, rtx);
9872extern rtx gen_avx512vl_ashrvv8hi (rtx, rtx, rtx);
9873extern rtx gen_avx512vl_ashrvv8hi_mask (rtx, rtx, rtx, rtx, rtx);
9874extern rtx gen_avx512vl_ashrvv16hi (rtx, rtx, rtx);
9875extern rtx gen_avx512vl_ashrvv16hi_mask (rtx, rtx, rtx, rtx, rtx);
9876extern rtx gen_avx512bw_ashrvv32hi (rtx, rtx, rtx);
9877extern rtx gen_avx512bw_ashrvv32hi_mask (rtx, rtx, rtx, rtx, rtx);
9878extern rtx gen_avx512f_ashlvv16si (rtx, rtx, rtx);
9879extern rtx gen_avx512f_ashlvv16si_mask (rtx, rtx, rtx, rtx, rtx);
9880extern rtx gen_avx512f_lshrvv16si (rtx, rtx, rtx);
9881extern rtx gen_avx512f_lshrvv16si_mask (rtx, rtx, rtx, rtx, rtx);
9882extern rtx gen_avx2_ashlvv8si (rtx, rtx, rtx);
9883extern rtx gen_avx2_ashlvv8si_mask (rtx, rtx, rtx, rtx, rtx);
9884extern rtx gen_avx2_lshrvv8si (rtx, rtx, rtx);
9885extern rtx gen_avx2_lshrvv8si_mask (rtx, rtx, rtx, rtx, rtx);
9886extern rtx gen_avx2_ashlvv4si (rtx, rtx, rtx);
9887extern rtx gen_avx2_ashlvv4si_mask (rtx, rtx, rtx, rtx, rtx);
9888extern rtx gen_avx2_lshrvv4si (rtx, rtx, rtx);
9889extern rtx gen_avx2_lshrvv4si_mask (rtx, rtx, rtx, rtx, rtx);
9890extern rtx gen_avx512f_ashlvv8di (rtx, rtx, rtx);
9891extern rtx gen_avx512f_ashlvv8di_mask (rtx, rtx, rtx, rtx, rtx);
9892extern rtx gen_avx512f_lshrvv8di (rtx, rtx, rtx);
9893extern rtx gen_avx512f_lshrvv8di_mask (rtx, rtx, rtx, rtx, rtx);
9894extern rtx gen_avx2_ashlvv4di (rtx, rtx, rtx);
9895extern rtx gen_avx2_ashlvv4di_mask (rtx, rtx, rtx, rtx, rtx);
9896extern rtx gen_avx2_lshrvv4di (rtx, rtx, rtx);
9897extern rtx gen_avx2_lshrvv4di_mask (rtx, rtx, rtx, rtx, rtx);
9898extern rtx gen_avx2_ashlvv2di (rtx, rtx, rtx);
9899extern rtx gen_avx2_ashlvv2di_mask (rtx, rtx, rtx, rtx, rtx);
9900extern rtx gen_avx2_lshrvv2di (rtx, rtx, rtx);
9901extern rtx gen_avx2_lshrvv2di_mask (rtx, rtx, rtx, rtx, rtx);
9902extern rtx gen_avx512vl_ashlvv8hi (rtx, rtx, rtx);
9903extern rtx gen_avx512vl_ashlvv8hi_mask (rtx, rtx, rtx, rtx, rtx);
9904extern rtx gen_avx512vl_lshrvv8hi (rtx, rtx, rtx);
9905extern rtx gen_avx512vl_lshrvv8hi_mask (rtx, rtx, rtx, rtx, rtx);
9906extern rtx gen_avx512vl_ashlvv16hi (rtx, rtx, rtx);
9907extern rtx gen_avx512vl_ashlvv16hi_mask (rtx, rtx, rtx, rtx, rtx);
9908extern rtx gen_avx512vl_lshrvv16hi (rtx, rtx, rtx);
9909extern rtx gen_avx512vl_lshrvv16hi_mask (rtx, rtx, rtx, rtx, rtx);
9910extern rtx gen_avx512bw_ashlvv32hi (rtx, rtx, rtx);
9911extern rtx gen_avx512bw_ashlvv32hi_mask (rtx, rtx, rtx, rtx, rtx);
9912extern rtx gen_avx512bw_lshrvv32hi (rtx, rtx, rtx);
9913extern rtx gen_avx512bw_lshrvv32hi_mask (rtx, rtx, rtx, rtx, rtx);
9914extern rtx gen_avx_vec_concatv32qi (rtx, rtx, rtx);
9915extern rtx gen_avx_vec_concatv16hi (rtx, rtx, rtx);
9916extern rtx gen_avx_vec_concatv8si (rtx, rtx, rtx);
9917extern rtx gen_avx_vec_concatv4di (rtx, rtx, rtx);
9918extern rtx gen_avx_vec_concatv8sf (rtx, rtx, rtx);
9919extern rtx gen_avx_vec_concatv4df (rtx, rtx, rtx);
9920extern rtx gen_avx_vec_concatv64qi (rtx, rtx, rtx);
9921extern rtx gen_avx_vec_concatv32hi (rtx, rtx, rtx);
9922extern rtx gen_avx_vec_concatv16si (rtx, rtx, rtx);
9923extern rtx gen_avx_vec_concatv8di (rtx, rtx, rtx);
9924extern rtx gen_avx_vec_concatv16sf (rtx, rtx, rtx);
9925extern rtx gen_avx_vec_concatv8df (rtx, rtx, rtx);
9926extern rtx gen_vcvtph2ps (rtx, rtx);
9927extern rtx gen_vcvtph2ps_mask (rtx, rtx, rtx, rtx);
9928extern rtx gen_vcvtph2ps256 (rtx, rtx);
9929extern rtx gen_vcvtph2ps256_mask (rtx, rtx, rtx, rtx);
9930extern rtx gen_avx512f_vcvtph2ps512_mask (rtx, rtx, rtx, rtx);
9931extern rtx gen_avx512f_vcvtph2ps512_mask_round (rtx, rtx, rtx, rtx, rtx);
9932extern rtx gen_vcvtps2ph256 (rtx, rtx, rtx);
9933extern rtx gen_vcvtps2ph256_mask (rtx, rtx, rtx, rtx, rtx);
9934extern rtx gen_avx512f_vcvtps2ph512_mask (rtx, rtx, rtx, rtx, rtx);
9935extern rtx gen_avx512f_compressv16si_mask (rtx, rtx, rtx, rtx);
9936extern rtx gen_avx512f_compressv16sf_mask (rtx, rtx, rtx, rtx);
9937extern rtx gen_avx512f_compressv8di_mask (rtx, rtx, rtx, rtx);
9938extern rtx gen_avx512f_compressv8df_mask (rtx, rtx, rtx, rtx);
9939extern rtx gen_avx512vl_compressv8si_mask (rtx, rtx, rtx, rtx);
9940extern rtx gen_avx512vl_compressv8sf_mask (rtx, rtx, rtx, rtx);
9941extern rtx gen_avx512vl_compressv4di_mask (rtx, rtx, rtx, rtx);
9942extern rtx gen_avx512vl_compressv4df_mask (rtx, rtx, rtx, rtx);
9943extern rtx gen_avx512vl_compressv4si_mask (rtx, rtx, rtx, rtx);
9944extern rtx gen_avx512vl_compressv4sf_mask (rtx, rtx, rtx, rtx);
9945extern rtx gen_avx512vl_compressv2di_mask (rtx, rtx, rtx, rtx);
9946extern rtx gen_avx512vl_compressv2df_mask (rtx, rtx, rtx, rtx);
9947extern rtx gen_avx512f_compressstorev16si_mask (rtx, rtx, rtx);
9948extern rtx gen_avx512f_compressstorev16sf_mask (rtx, rtx, rtx);
9949extern rtx gen_avx512f_compressstorev8di_mask (rtx, rtx, rtx);
9950extern rtx gen_avx512f_compressstorev8df_mask (rtx, rtx, rtx);
9951extern rtx gen_avx512vl_compressstorev8si_mask (rtx, rtx, rtx);
9952extern rtx gen_avx512vl_compressstorev8sf_mask (rtx, rtx, rtx);
9953extern rtx gen_avx512vl_compressstorev4di_mask (rtx, rtx, rtx);
9954extern rtx gen_avx512vl_compressstorev4df_mask (rtx, rtx, rtx);
9955extern rtx gen_avx512vl_compressstorev4si_mask (rtx, rtx, rtx);
9956extern rtx gen_avx512vl_compressstorev4sf_mask (rtx, rtx, rtx);
9957extern rtx gen_avx512vl_compressstorev2di_mask (rtx, rtx, rtx);
9958extern rtx gen_avx512vl_compressstorev2df_mask (rtx, rtx, rtx);
9959extern rtx gen_avx512f_expandv16si_mask (rtx, rtx, rtx, rtx);
9960extern rtx gen_avx512f_expandv16sf_mask (rtx, rtx, rtx, rtx);
9961extern rtx gen_avx512f_expandv8di_mask (rtx, rtx, rtx, rtx);
9962extern rtx gen_avx512f_expandv8df_mask (rtx, rtx, rtx, rtx);
9963extern rtx gen_avx512vl_expandv8si_mask (rtx, rtx, rtx, rtx);
9964extern rtx gen_avx512vl_expandv8sf_mask (rtx, rtx, rtx, rtx);
9965extern rtx gen_avx512vl_expandv4di_mask (rtx, rtx, rtx, rtx);
9966extern rtx gen_avx512vl_expandv4df_mask (rtx, rtx, rtx, rtx);
9967extern rtx gen_avx512vl_expandv4si_mask (rtx, rtx, rtx, rtx);
9968extern rtx gen_avx512vl_expandv4sf_mask (rtx, rtx, rtx, rtx);
9969extern rtx gen_avx512vl_expandv2di_mask (rtx, rtx, rtx, rtx);
9970extern rtx gen_avx512vl_expandv2df_mask (rtx, rtx, rtx, rtx);
9971extern rtx gen_avx512dq_rangepv16sf (rtx, rtx, rtx, rtx);
9972extern rtx gen_avx512dq_rangepv16sf_round (rtx, rtx, rtx, rtx, rtx);
9973extern rtx gen_avx512dq_rangepv16sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
9974extern rtx gen_avx512dq_rangepv16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9975extern rtx gen_avx512dq_rangepv8sf (rtx, rtx, rtx, rtx);
9976static inline rtx gen_avx512dq_rangepv8sf_round (rtx, rtx, rtx, rtx, rtx);
9977static inline rtx
9978gen_avx512dq_rangepv8sf_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
9979{
9980 return 0;
9981}
9982extern rtx gen_avx512dq_rangepv8sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
9983static inline rtx gen_avx512dq_rangepv8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9984static inline rtx
9985gen_avx512dq_rangepv8sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
9986{
9987 return 0;
9988}
9989extern rtx gen_avx512dq_rangepv4sf (rtx, rtx, rtx, rtx);
9990static inline rtx gen_avx512dq_rangepv4sf_round (rtx, rtx, rtx, rtx, rtx);
9991static inline rtx
9992gen_avx512dq_rangepv4sf_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
9993{
9994 return 0;
9995}
9996extern rtx gen_avx512dq_rangepv4sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
9997static inline rtx gen_avx512dq_rangepv4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
9998static inline rtx
9999gen_avx512dq_rangepv4sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
10000{
10001 return 0;
10002}
10003extern rtx gen_avx512dq_rangepv8df (rtx, rtx, rtx, rtx);
10004extern rtx gen_avx512dq_rangepv8df_round (rtx, rtx, rtx, rtx, rtx);
10005extern rtx gen_avx512dq_rangepv8df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
10006extern rtx gen_avx512dq_rangepv8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
10007extern rtx gen_avx512dq_rangepv4df (rtx, rtx, rtx, rtx);
10008static inline rtx gen_avx512dq_rangepv4df_round (rtx, rtx, rtx, rtx, rtx);
10009static inline rtx
10010gen_avx512dq_rangepv4df_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
10011{
10012 return 0;
10013}
10014extern rtx gen_avx512dq_rangepv4df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
10015static inline rtx gen_avx512dq_rangepv4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
10016static inline rtx
10017gen_avx512dq_rangepv4df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
10018{
10019 return 0;
10020}
10021extern rtx gen_avx512dq_rangepv2df (rtx, rtx, rtx, rtx);
10022static inline rtx gen_avx512dq_rangepv2df_round (rtx, rtx, rtx, rtx, rtx);
10023static inline rtx
10024gen_avx512dq_rangepv2df_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
10025{
10026 return 0;
10027}
10028extern rtx gen_avx512dq_rangepv2df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
10029static inline rtx gen_avx512dq_rangepv2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
10030static inline rtx
10031gen_avx512dq_rangepv2df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
10032{
10033 return 0;
10034}
10035extern rtx gen_avx512dq_rangesv4sf (rtx, rtx, rtx, rtx);
10036extern rtx gen_avx512dq_rangesv4sf_round (rtx, rtx, rtx, rtx, rtx);
10037extern rtx gen_avx512dq_rangesv2df (rtx, rtx, rtx, rtx);
10038extern rtx gen_avx512dq_rangesv2df_round (rtx, rtx, rtx, rtx, rtx);
10039extern rtx gen_avx512dq_fpclassv16sf (rtx, rtx, rtx);
10040extern rtx gen_avx512dq_fpclassv16sf_mask (rtx, rtx, rtx, rtx);
10041extern rtx gen_avx512dq_fpclassv8sf (rtx, rtx, rtx);
10042extern rtx gen_avx512dq_fpclassv8sf_mask (rtx, rtx, rtx, rtx);
10043extern rtx gen_avx512dq_fpclassv4sf (rtx, rtx, rtx);
10044extern rtx gen_avx512dq_fpclassv4sf_mask (rtx, rtx, rtx, rtx);
10045extern rtx gen_avx512dq_fpclassv8df (rtx, rtx, rtx);
10046extern rtx gen_avx512dq_fpclassv8df_mask (rtx, rtx, rtx, rtx);
10047extern rtx gen_avx512dq_fpclassv4df (rtx, rtx, rtx);
10048extern rtx gen_avx512dq_fpclassv4df_mask (rtx, rtx, rtx, rtx);
10049extern rtx gen_avx512dq_fpclassv2df (rtx, rtx, rtx);
10050extern rtx gen_avx512dq_fpclassv2df_mask (rtx, rtx, rtx, rtx);
10051extern rtx gen_avx512dq_vmfpclassv4sf (rtx, rtx, rtx);
10052extern rtx gen_avx512dq_vmfpclassv2df (rtx, rtx, rtx);
10053extern rtx gen_avx512f_getmantv16sf (rtx, rtx, rtx);
10054extern rtx gen_avx512f_getmantv16sf_round (rtx, rtx, rtx, rtx);
10055extern rtx gen_avx512f_getmantv16sf_mask (rtx, rtx, rtx, rtx, rtx);
10056extern rtx gen_avx512f_getmantv16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
10057extern rtx gen_avx512vl_getmantv8sf (rtx, rtx, rtx);
10058extern rtx gen_avx512vl_getmantv8sf_round (rtx, rtx, rtx, rtx);
10059extern rtx gen_avx512vl_getmantv8sf_mask (rtx, rtx, rtx, rtx, rtx);
10060extern rtx gen_avx512vl_getmantv8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
10061extern rtx gen_avx512vl_getmantv4sf (rtx, rtx, rtx);
10062extern rtx gen_avx512vl_getmantv4sf_round (rtx, rtx, rtx, rtx);
10063extern rtx gen_avx512vl_getmantv4sf_mask (rtx, rtx, rtx, rtx, rtx);
10064extern rtx gen_avx512vl_getmantv4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
10065extern rtx gen_avx512f_getmantv8df (rtx, rtx, rtx);
10066extern rtx gen_avx512f_getmantv8df_round (rtx, rtx, rtx, rtx);
10067extern rtx gen_avx512f_getmantv8df_mask (rtx, rtx, rtx, rtx, rtx);
10068extern rtx gen_avx512f_getmantv8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
10069extern rtx gen_avx512vl_getmantv4df (rtx, rtx, rtx);
10070extern rtx gen_avx512vl_getmantv4df_round (rtx, rtx, rtx, rtx);
10071extern rtx gen_avx512vl_getmantv4df_mask (rtx, rtx, rtx, rtx, rtx);
10072extern rtx gen_avx512vl_getmantv4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
10073extern rtx gen_avx512vl_getmantv2df (rtx, rtx, rtx);
10074extern rtx gen_avx512vl_getmantv2df_round (rtx, rtx, rtx, rtx);
10075extern rtx gen_avx512vl_getmantv2df_mask (rtx, rtx, rtx, rtx, rtx);
10076extern rtx gen_avx512vl_getmantv2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
10077extern rtx gen_avx512f_vgetmantv4sf (rtx, rtx, rtx, rtx);
10078extern rtx gen_avx512f_vgetmantv4sf_round (rtx, rtx, rtx, rtx, rtx);
10079extern rtx gen_avx512f_vgetmantv2df (rtx, rtx, rtx, rtx);
10080extern rtx gen_avx512f_vgetmantv2df_round (rtx, rtx, rtx, rtx, rtx);
10081extern rtx gen_avx512bw_dbpsadbwv8hi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
10082extern rtx gen_avx512bw_dbpsadbwv16hi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
10083extern rtx gen_avx512bw_dbpsadbwv32hi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
10084extern rtx gen_clzv16si2 (rtx, rtx);
10085extern rtx gen_clzv16si2_mask (rtx, rtx, rtx, rtx);
10086extern rtx gen_clzv8si2 (rtx, rtx);
10087extern rtx gen_clzv8si2_mask (rtx, rtx, rtx, rtx);
10088extern rtx gen_clzv4si2 (rtx, rtx);
10089extern rtx gen_clzv4si2_mask (rtx, rtx, rtx, rtx);
10090extern rtx gen_clzv8di2 (rtx, rtx);
10091extern rtx gen_clzv8di2_mask (rtx, rtx, rtx, rtx);
10092extern rtx gen_clzv4di2 (rtx, rtx);
10093extern rtx gen_clzv4di2_mask (rtx, rtx, rtx, rtx);
10094extern rtx gen_clzv2di2 (rtx, rtx);
10095extern rtx gen_clzv2di2_mask (rtx, rtx, rtx, rtx);
10096extern rtx gen_conflictv16si_mask (rtx, rtx, rtx, rtx);
10097extern rtx gen_conflictv8si_mask (rtx, rtx, rtx, rtx);
10098extern rtx gen_conflictv4si_mask (rtx, rtx, rtx, rtx);
10099extern rtx gen_conflictv8di_mask (rtx, rtx, rtx, rtx);
10100extern rtx gen_conflictv4di_mask (rtx, rtx, rtx, rtx);
10101extern rtx gen_conflictv2di_mask (rtx, rtx, rtx, rtx);
10102extern rtx gen_sha1msg1 (rtx, rtx, rtx);
10103extern rtx gen_sha1msg2 (rtx, rtx, rtx);
10104extern rtx gen_sha1nexte (rtx, rtx, rtx);
10105extern rtx gen_sha1rnds4 (rtx, rtx, rtx, rtx);
10106extern rtx gen_sha256msg1 (rtx, rtx, rtx);
10107extern rtx gen_sha256msg2 (rtx, rtx, rtx);
10108extern rtx gen_sha256rnds2 (rtx, rtx, rtx, rtx);
10109extern rtx gen_avx512f_si512_si (rtx, rtx);
10110extern rtx gen_avx512f_ps512_ps (rtx, rtx);
10111extern rtx gen_avx512f_pd512_pd (rtx, rtx);
10112extern rtx gen_avx512f_si512_256si (rtx, rtx);
10113extern rtx gen_avx512f_ps512_256ps (rtx, rtx);
10114extern rtx gen_avx512f_pd512_256pd (rtx, rtx);
10115extern rtx gen_vpamdd52luqv8di (rtx, rtx, rtx, rtx);
10116extern rtx gen_vpamdd52luqv8di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
10117extern rtx gen_vpamdd52huqv8di (rtx, rtx, rtx, rtx);
10118extern rtx gen_vpamdd52huqv8di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
10119extern rtx gen_vpamdd52luqv4di (rtx, rtx, rtx, rtx);
10120extern rtx gen_vpamdd52luqv4di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
10121extern rtx gen_vpamdd52huqv4di (rtx, rtx, rtx, rtx);
10122extern rtx gen_vpamdd52huqv4di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
10123extern rtx gen_vpamdd52luqv2di (rtx, rtx, rtx, rtx);
10124extern rtx gen_vpamdd52luqv2di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
10125extern rtx gen_vpamdd52huqv2di (rtx, rtx, rtx, rtx);
10126extern rtx gen_vpamdd52huqv2di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
10127extern rtx gen_vpamdd52luqv8di_mask (rtx, rtx, rtx, rtx, rtx);
10128extern rtx gen_vpamdd52huqv8di_mask (rtx, rtx, rtx, rtx, rtx);
10129extern rtx gen_vpamdd52luqv4di_mask (rtx, rtx, rtx, rtx, rtx);
10130extern rtx gen_vpamdd52huqv4di_mask (rtx, rtx, rtx, rtx, rtx);
10131extern rtx gen_vpamdd52luqv2di_mask (rtx, rtx, rtx, rtx, rtx);
10132extern rtx gen_vpamdd52huqv2di_mask (rtx, rtx, rtx, rtx, rtx);
10133extern rtx gen_vpmultishiftqbv64qi (rtx, rtx, rtx);
10134extern rtx gen_vpmultishiftqbv64qi_mask (rtx, rtx, rtx, rtx, rtx);
10135extern rtx gen_vpmultishiftqbv16qi (rtx, rtx, rtx);
10136extern rtx gen_vpmultishiftqbv16qi_mask (rtx, rtx, rtx, rtx, rtx);
10137extern rtx gen_vpmultishiftqbv32qi (rtx, rtx, rtx);
10138extern rtx gen_vpmultishiftqbv32qi_mask (rtx, rtx, rtx, rtx, rtx);
10139extern rtx gen_avx5124fmaddps_4fmaddps (rtx, rtx, rtx, rtx);
10140extern rtx gen_avx5124fmaddps_4fmaddps_mask (rtx, rtx, rtx, rtx, rtx);
10141extern rtx gen_avx5124fmaddps_4fmaddps_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
10142extern rtx gen_avx5124fmaddps_4fmaddss (rtx, rtx, rtx, rtx);
10143extern rtx gen_avx5124fmaddps_4fmaddss_mask (rtx, rtx, rtx, rtx, rtx);
10144extern rtx gen_avx5124fmaddps_4fmaddss_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
10145extern rtx gen_avx5124fmaddps_4fnmaddps (rtx, rtx, rtx, rtx);
10146extern rtx gen_avx5124fmaddps_4fnmaddps_mask (rtx, rtx, rtx, rtx, rtx);
10147extern rtx gen_avx5124fmaddps_4fnmaddps_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
10148extern rtx gen_avx5124fmaddps_4fnmaddss (rtx, rtx, rtx, rtx);
10149extern rtx gen_avx5124fmaddps_4fnmaddss_mask (rtx, rtx, rtx, rtx, rtx);
10150extern rtx gen_avx5124fmaddps_4fnmaddss_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
10151extern rtx gen_avx5124vnniw_vp4dpwssd (rtx, rtx, rtx, rtx);
10152extern rtx gen_avx5124vnniw_vp4dpwssd_mask (rtx, rtx, rtx, rtx, rtx);
10153extern rtx gen_avx5124vnniw_vp4dpwssd_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
10154extern rtx gen_avx5124vnniw_vp4dpwssds (rtx, rtx, rtx, rtx);
10155extern rtx gen_avx5124vnniw_vp4dpwssds_mask (rtx, rtx, rtx, rtx, rtx);
10156extern rtx gen_avx5124vnniw_vp4dpwssds_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
10157extern rtx gen_vpopcountv16si (rtx, rtx);
10158extern rtx gen_vpopcountv16si_mask (rtx, rtx, rtx, rtx);
10159extern rtx gen_vpopcountv8di (rtx, rtx);
10160extern rtx gen_vpopcountv8di_mask (rtx, rtx, rtx, rtx);
10161extern rtx gen_mfence_sse2 (rtx);
10162extern rtx gen_mfence_nosse (rtx);
10163extern rtx gen_atomic_loaddi_fpu (rtx, rtx, rtx);
10164extern rtx gen_atomic_storeqi_1 (rtx, rtx, rtx);
10165extern rtx gen_atomic_storehi_1 (rtx, rtx, rtx);
10166extern rtx gen_atomic_storesi_1 (rtx, rtx, rtx);
10167extern rtx gen_atomic_storedi_1 (rtx, rtx, rtx);
10168extern rtx gen_atomic_storedi_fpu (rtx, rtx, rtx);
10169extern rtx gen_loaddi_via_fpu (rtx, rtx);
10170extern rtx gen_storedi_via_fpu (rtx, rtx);
10171extern rtx gen_loaddi_via_sse (rtx, rtx);
10172extern rtx gen_storedi_via_sse (rtx, rtx);
10173extern rtx gen_atomic_compare_and_swapdi_doubleword (rtx, rtx, rtx, rtx, rtx, rtx);
10174extern rtx gen_atomic_compare_and_swapti_doubleword (rtx, rtx, rtx, rtx, rtx, rtx);
10175extern rtx gen_atomic_compare_and_swapqi_1 (rtx, rtx, rtx, rtx, rtx);
10176extern rtx gen_atomic_compare_and_swaphi_1 (rtx, rtx, rtx, rtx, rtx);
10177extern rtx gen_atomic_compare_and_swapsi_1 (rtx, rtx, rtx, rtx, rtx);
10178extern rtx gen_atomic_compare_and_swapdi_1 (rtx, rtx, rtx, rtx, rtx);
10179extern rtx gen_atomic_fetch_addqi (rtx, rtx, rtx, rtx);
10180extern rtx gen_atomic_fetch_addhi (rtx, rtx, rtx, rtx);
10181extern rtx gen_atomic_fetch_addsi (rtx, rtx, rtx, rtx);
10182extern rtx gen_atomic_fetch_adddi (rtx, rtx, rtx, rtx);
10183extern rtx gen_atomic_exchangeqi (rtx, rtx, rtx, rtx);
10184extern rtx gen_atomic_exchangehi (rtx, rtx, rtx, rtx);
10185extern rtx gen_atomic_exchangesi (rtx, rtx, rtx, rtx);
10186extern rtx gen_atomic_exchangedi (rtx, rtx, rtx, rtx);
10187extern rtx gen_atomic_addqi (rtx, rtx, rtx);
10188extern rtx gen_atomic_addhi (rtx, rtx, rtx);
10189extern rtx gen_atomic_addsi (rtx, rtx, rtx);
10190extern rtx gen_atomic_adddi (rtx, rtx, rtx);
10191extern rtx gen_atomic_subqi (rtx, rtx, rtx);
10192extern rtx gen_atomic_subhi (rtx, rtx, rtx);
10193extern rtx gen_atomic_subsi (rtx, rtx, rtx);
10194extern rtx gen_atomic_subdi (rtx, rtx, rtx);
10195extern rtx gen_atomic_andqi (rtx, rtx, rtx);
10196extern rtx gen_atomic_orqi (rtx, rtx, rtx);
10197extern rtx gen_atomic_xorqi (rtx, rtx, rtx);
10198extern rtx gen_atomic_andhi (rtx, rtx, rtx);
10199extern rtx gen_atomic_orhi (rtx, rtx, rtx);
10200extern rtx gen_atomic_xorhi (rtx, rtx, rtx);
10201extern rtx gen_atomic_andsi (rtx, rtx, rtx);
10202extern rtx gen_atomic_orsi (rtx, rtx, rtx);
10203extern rtx gen_atomic_xorsi (rtx, rtx, rtx);
10204extern rtx gen_atomic_anddi (rtx, rtx, rtx);
10205extern rtx gen_atomic_ordi (rtx, rtx, rtx);
10206extern rtx gen_atomic_xordi (rtx, rtx, rtx);
10207extern rtx gen_atomic_bit_test_and_sethi_1 (rtx, rtx, rtx);
10208extern rtx gen_atomic_bit_test_and_setsi_1 (rtx, rtx, rtx);
10209extern rtx gen_atomic_bit_test_and_setdi_1 (rtx, rtx, rtx);
10210extern rtx gen_atomic_bit_test_and_complementhi_1 (rtx, rtx, rtx);
10211extern rtx gen_atomic_bit_test_and_complementsi_1 (rtx, rtx, rtx);
10212extern rtx gen_atomic_bit_test_and_complementdi_1 (rtx, rtx, rtx);
10213extern rtx gen_atomic_bit_test_and_resethi_1 (rtx, rtx, rtx);
10214extern rtx gen_atomic_bit_test_and_resetsi_1 (rtx, rtx, rtx);
10215extern rtx gen_atomic_bit_test_and_resetdi_1 (rtx, rtx, rtx);
10216extern rtx gen_cbranchqi4 (rtx, rtx, rtx, rtx);
10217extern rtx gen_cbranchhi4 (rtx, rtx, rtx, rtx);
10218extern rtx gen_cbranchsi4 (rtx, rtx, rtx, rtx);
10219extern rtx gen_cbranchdi4 (rtx, rtx, rtx, rtx);
10220extern rtx gen_cbranchti4 (rtx, rtx, rtx, rtx);
10221extern rtx gen_cstoreqi4 (rtx, rtx, rtx, rtx);
10222extern rtx gen_cstorehi4 (rtx, rtx, rtx, rtx);
10223extern rtx gen_cstoresi4 (rtx, rtx, rtx, rtx);
10224extern rtx gen_cstoredi4 (rtx, rtx, rtx, rtx);
10225extern rtx gen_cmpsi_1 (rtx, rtx);
10226extern rtx gen_cmpdi_1 (rtx, rtx);
10227extern rtx gen_cmpqi_ext_3 (rtx, rtx);
10228extern rtx gen_cbranchxf4 (rtx, rtx, rtx, rtx);
10229extern rtx gen_cstorexf4 (rtx, rtx, rtx, rtx);
10230extern rtx gen_cbranchsf4 (rtx, rtx, rtx, rtx);
10231extern rtx gen_cbranchdf4 (rtx, rtx, rtx, rtx);
10232extern rtx gen_cstoresf4 (rtx, rtx, rtx, rtx);
10233extern rtx gen_cstoredf4 (rtx, rtx, rtx, rtx);
10234extern rtx gen_cbranchcc4 (rtx, rtx, rtx, rtx);
10235extern rtx gen_cstorecc4 (rtx, rtx, rtx, rtx);
10236extern rtx gen_reload_noff_store (rtx, rtx, rtx);
10237extern rtx gen_reload_noff_load (rtx, rtx, rtx);
10238extern rtx gen_movxi (rtx, rtx);
10239extern rtx gen_movoi (rtx, rtx);
10240extern rtx gen_movti (rtx, rtx);
10241extern rtx gen_movcdi (rtx, rtx);
10242extern rtx gen_movqi (rtx, rtx);
10243extern rtx gen_movhi (rtx, rtx);
10244extern rtx gen_movsi (rtx, rtx);
10245extern rtx gen_movdi (rtx, rtx);
10246extern rtx gen_movstrictqi (rtx, rtx);
10247extern rtx gen_movstricthi (rtx, rtx);
10248extern rtx gen_extvhi (rtx, rtx, rtx, rtx);
10249extern rtx gen_extvsi (rtx, rtx, rtx, rtx);
10250extern rtx gen_extzvhi (rtx, rtx, rtx, rtx);
10251extern rtx gen_extzvsi (rtx, rtx, rtx, rtx);
10252extern rtx gen_extzvdi (rtx, rtx, rtx, rtx);
10253extern rtx gen_insvhi (rtx, rtx, rtx, rtx);
10254extern rtx gen_insvsi (rtx, rtx, rtx, rtx);
10255extern rtx gen_insvdi (rtx, rtx, rtx, rtx);
10256extern rtx gen_movtf (rtx, rtx);
10257extern rtx gen_movsf (rtx, rtx);
10258extern rtx gen_movdf (rtx, rtx);
10259extern rtx gen_movxf (rtx, rtx);
10260extern rtx gen_zero_extendsidi2 (rtx, rtx);
10261extern rtx gen_zero_extendqisi2 (rtx, rtx);
10262extern rtx gen_zero_extendhisi2 (rtx, rtx);
10263extern rtx gen_zero_extendqihi2 (rtx, rtx);
10264extern rtx gen_extendsidi2 (rtx, rtx);
10265extern rtx gen_extendsfdf2 (rtx, rtx);
10266extern rtx gen_extendsfxf2 (rtx, rtx);
10267extern rtx gen_extenddfxf2 (rtx, rtx);
10268extern rtx gen_truncdfsf2 (rtx, rtx);
10269extern rtx gen_truncdfsf2_with_temp (rtx, rtx, rtx);
10270extern rtx gen_truncxfsf2 (rtx, rtx);
10271extern rtx gen_truncxfdf2 (rtx, rtx);
10272extern rtx gen_fix_truncxfdi2 (rtx, rtx);
10273extern rtx gen_fix_truncsfdi2 (rtx, rtx);
10274extern rtx gen_fix_truncdfdi2 (rtx, rtx);
10275extern rtx gen_fix_truncxfsi2 (rtx, rtx);
10276extern rtx gen_fix_truncsfsi2 (rtx, rtx);
10277extern rtx gen_fix_truncdfsi2 (rtx, rtx);
10278extern rtx gen_fix_truncsfhi2 (rtx, rtx);
10279extern rtx gen_fix_truncdfhi2 (rtx, rtx);
10280extern rtx gen_fix_truncxfhi2 (rtx, rtx);
10281extern rtx gen_fixuns_truncsfsi2 (rtx, rtx);
10282extern rtx gen_fixuns_truncdfsi2 (rtx, rtx);
10283extern rtx gen_fixuns_truncsfhi2 (rtx, rtx);
10284extern rtx gen_fixuns_truncdfhi2 (rtx, rtx);
10285extern rtx gen_floatsisf2 (rtx, rtx);
10286extern rtx gen_floatdisf2 (rtx, rtx);
10287extern rtx gen_floatsidf2 (rtx, rtx);
10288extern rtx gen_floatdidf2 (rtx, rtx);
10289extern rtx gen_floatunsqisf2 (rtx, rtx);
10290extern rtx gen_floatunshisf2 (rtx, rtx);
10291extern rtx gen_floatunsqidf2 (rtx, rtx);
10292extern rtx gen_floatunshidf2 (rtx, rtx);
10293extern rtx gen_floatunssisf2 (rtx, rtx);
10294extern rtx gen_floatunssidf2 (rtx, rtx);
10295extern rtx gen_floatunssixf2 (rtx, rtx);
10296extern rtx gen_floatunsdisf2 (rtx, rtx);
10297extern rtx gen_floatunsdidf2 (rtx, rtx);
10298extern rtx gen_addqi3 (rtx, rtx, rtx);
10299extern rtx gen_addhi3 (rtx, rtx, rtx);
10300extern rtx gen_addsi3 (rtx, rtx, rtx);
10301extern rtx gen_adddi3 (rtx, rtx, rtx);
10302extern rtx gen_addti3 (rtx, rtx, rtx);
10303extern rtx gen_addvqi4 (rtx, rtx, rtx, rtx);
10304extern rtx gen_addvhi4 (rtx, rtx, rtx, rtx);
10305extern rtx gen_addvsi4 (rtx, rtx, rtx, rtx);
10306extern rtx gen_addvdi4 (rtx, rtx, rtx, rtx);
10307extern rtx gen_uaddvqi4 (rtx, rtx, rtx, rtx);
10308extern rtx gen_uaddvhi4 (rtx, rtx, rtx, rtx);
10309extern rtx gen_uaddvsi4 (rtx, rtx, rtx, rtx);
10310extern rtx gen_uaddvdi4 (rtx, rtx, rtx, rtx);
10311extern rtx gen_subqi3 (rtx, rtx, rtx);
10312extern rtx gen_subhi3 (rtx, rtx, rtx);
10313extern rtx gen_subsi3 (rtx, rtx, rtx);
10314extern rtx gen_subdi3 (rtx, rtx, rtx);
10315extern rtx gen_subti3 (rtx, rtx, rtx);
10316extern rtx gen_subvqi4 (rtx, rtx, rtx, rtx);
10317extern rtx gen_subvhi4 (rtx, rtx, rtx, rtx);
10318extern rtx gen_subvsi4 (rtx, rtx, rtx, rtx);
10319extern rtx gen_subvdi4 (rtx, rtx, rtx, rtx);
10320extern rtx gen_usubvqi4 (rtx, rtx, rtx, rtx);
10321extern rtx gen_usubvhi4 (rtx, rtx, rtx, rtx);
10322extern rtx gen_usubvsi4 (rtx, rtx, rtx, rtx);
10323extern rtx gen_usubvdi4 (rtx, rtx, rtx, rtx);
10324extern rtx gen_addqi3_cconly_overflow (rtx, rtx);
10325extern rtx gen_addxf3 (rtx, rtx, rtx);
10326extern rtx gen_subxf3 (rtx, rtx, rtx);
10327extern rtx gen_addsf3 (rtx, rtx, rtx);
10328extern rtx gen_subsf3 (rtx, rtx, rtx);
10329extern rtx gen_adddf3 (rtx, rtx, rtx);
10330extern rtx gen_subdf3 (rtx, rtx, rtx);
10331extern rtx gen_mulhi3 (rtx, rtx, rtx);
10332extern rtx gen_mulsi3 (rtx, rtx, rtx);
10333extern rtx gen_muldi3 (rtx, rtx, rtx);
10334extern rtx gen_mulqi3 (rtx, rtx, rtx);
10335extern rtx gen_mulvhi4 (rtx, rtx, rtx, rtx);
10336extern rtx gen_mulvsi4 (rtx, rtx, rtx, rtx);
10337extern rtx gen_mulvdi4 (rtx, rtx, rtx, rtx);
10338extern rtx gen_umulvhi4 (rtx, rtx, rtx, rtx);
10339extern rtx gen_umulvsi4 (rtx, rtx, rtx, rtx);
10340extern rtx gen_umulvdi4 (rtx, rtx, rtx, rtx);
10341extern rtx gen_mulvqi4 (rtx, rtx, rtx, rtx);
10342extern rtx gen_umulvqi4 (rtx, rtx, rtx, rtx);
10343extern rtx gen_mulsidi3 (rtx, rtx, rtx);
10344extern rtx gen_umulsidi3 (rtx, rtx, rtx);
10345extern rtx gen_mulditi3 (rtx, rtx, rtx);
10346extern rtx gen_umulditi3 (rtx, rtx, rtx);
10347extern rtx gen_mulqihi3 (rtx, rtx, rtx);
10348extern rtx gen_umulqihi3 (rtx, rtx, rtx);
10349extern rtx gen_smulsi3_highpart (rtx, rtx, rtx);
10350extern rtx gen_umulsi3_highpart (rtx, rtx, rtx);
10351extern rtx gen_smuldi3_highpart (rtx, rtx, rtx);
10352extern rtx gen_umuldi3_highpart (rtx, rtx, rtx);
10353extern rtx gen_mulxf3 (rtx, rtx, rtx);
10354extern rtx gen_mulsf3 (rtx, rtx, rtx);
10355extern rtx gen_muldf3 (rtx, rtx, rtx);
10356extern rtx gen_divxf3 (rtx, rtx, rtx);
10357extern rtx gen_divsf3 (rtx, rtx, rtx);
10358extern rtx gen_divdf3 (rtx, rtx, rtx);
10359extern rtx gen_divmodhi4 (rtx, rtx, rtx, rtx);
10360extern rtx gen_divmodsi4 (rtx, rtx, rtx, rtx);
10361extern rtx gen_divmoddi4 (rtx, rtx, rtx, rtx);
10362extern rtx gen_divmodqi4 (rtx, rtx, rtx, rtx);
10363extern rtx gen_udivmodhi4 (rtx, rtx, rtx, rtx);
10364extern rtx gen_udivmodsi4 (rtx, rtx, rtx, rtx);
10365extern rtx gen_udivmoddi4 (rtx, rtx, rtx, rtx);
10366extern rtx gen_udivmodqi4 (rtx, rtx, rtx, rtx);
10367extern rtx gen_testsi_ccno_1 (rtx, rtx);
10368extern rtx gen_testqi_ccz_1 (rtx, rtx);
10369extern rtx gen_testdi_ccno_1 (rtx, rtx);
10370extern rtx gen_testqi_ext_1_ccno (rtx, rtx);
10371extern rtx gen_andqi3 (rtx, rtx, rtx);
10372extern rtx gen_andhi3 (rtx, rtx, rtx);
10373extern rtx gen_andsi3 (rtx, rtx, rtx);
10374extern rtx gen_anddi3 (rtx, rtx, rtx);
10375extern rtx gen_iorqi3 (rtx, rtx, rtx);
10376extern rtx gen_xorqi3 (rtx, rtx, rtx);
10377extern rtx gen_iorhi3 (rtx, rtx, rtx);
10378extern rtx gen_xorhi3 (rtx, rtx, rtx);
10379extern rtx gen_iorsi3 (rtx, rtx, rtx);
10380extern rtx gen_xorsi3 (rtx, rtx, rtx);
10381extern rtx gen_iordi3 (rtx, rtx, rtx);
10382extern rtx gen_xordi3 (rtx, rtx, rtx);
10383extern rtx gen_xorqi_ext_1_cc (rtx, rtx, rtx);
10384extern rtx gen_negqi2 (rtx, rtx);
10385extern rtx gen_neghi2 (rtx, rtx);
10386extern rtx gen_negsi2 (rtx, rtx);
10387extern rtx gen_negdi2 (rtx, rtx);
10388extern rtx gen_negti2 (rtx, rtx);
10389extern rtx gen_negvqi3 (rtx, rtx, rtx);
10390extern rtx gen_negvhi3 (rtx, rtx, rtx);
10391extern rtx gen_negvsi3 (rtx, rtx, rtx);
10392extern rtx gen_negvdi3 (rtx, rtx, rtx);
10393extern rtx gen_abssf2 (rtx, rtx);
10394extern rtx gen_negsf2 (rtx, rtx);
10395extern rtx gen_absdf2 (rtx, rtx);
10396extern rtx gen_negdf2 (rtx, rtx);
10397extern rtx gen_absxf2 (rtx, rtx);
10398extern rtx gen_negxf2 (rtx, rtx);
10399extern rtx gen_abstf2 (rtx, rtx);
10400extern rtx gen_negtf2 (rtx, rtx);
10401extern rtx gen_copysignsf3 (rtx, rtx, rtx);
10402extern rtx gen_copysigndf3 (rtx, rtx, rtx);
10403extern rtx gen_copysigntf3 (rtx, rtx, rtx);
10404extern rtx gen_one_cmplqi2 (rtx, rtx);
10405extern rtx gen_one_cmplhi2 (rtx, rtx);
10406extern rtx gen_one_cmplsi2 (rtx, rtx);
10407extern rtx gen_one_cmpldi2 (rtx, rtx);
10408extern rtx gen_ashlqi3 (rtx, rtx, rtx);
10409extern rtx gen_ashlhi3 (rtx, rtx, rtx);
10410extern rtx gen_ashlsi3 (rtx, rtx, rtx);
10411extern rtx gen_ashldi3 (rtx, rtx, rtx);
10412extern rtx gen_ashlti3 (rtx, rtx, rtx);
10413extern rtx gen_x86_shiftsi_adj_1 (rtx, rtx, rtx, rtx);
10414extern rtx gen_x86_shiftdi_adj_1 (rtx, rtx, rtx, rtx);
10415extern rtx gen_x86_shiftsi_adj_2 (rtx, rtx, rtx);
10416extern rtx gen_x86_shiftdi_adj_2 (rtx, rtx, rtx);
10417extern rtx gen_lshrqi3 (rtx, rtx, rtx);
10418extern rtx gen_ashrqi3 (rtx, rtx, rtx);
10419extern rtx gen_lshrhi3 (rtx, rtx, rtx);
10420extern rtx gen_ashrhi3 (rtx, rtx, rtx);
10421extern rtx gen_lshrsi3 (rtx, rtx, rtx);
10422extern rtx gen_ashrsi3 (rtx, rtx, rtx);
10423extern rtx gen_lshrdi3 (rtx, rtx, rtx);
10424extern rtx gen_ashrdi3 (rtx, rtx, rtx);
10425extern rtx gen_lshrti3 (rtx, rtx, rtx);
10426extern rtx gen_ashrti3 (rtx, rtx, rtx);
10427extern rtx gen_x86_shiftsi_adj_3 (rtx, rtx, rtx);
10428extern rtx gen_x86_shiftdi_adj_3 (rtx, rtx, rtx);
10429extern rtx gen_rotlti3 (rtx, rtx, rtx);
10430extern rtx gen_rotrti3 (rtx, rtx, rtx);
10431extern rtx gen_rotldi3 (rtx, rtx, rtx);
10432extern rtx gen_rotrdi3 (rtx, rtx, rtx);
10433extern rtx gen_rotlqi3 (rtx, rtx, rtx);
10434extern rtx gen_rotrqi3 (rtx, rtx, rtx);
10435extern rtx gen_rotlhi3 (rtx, rtx, rtx);
10436extern rtx gen_rotrhi3 (rtx, rtx, rtx);
10437extern rtx gen_rotlsi3 (rtx, rtx, rtx);
10438extern rtx gen_rotrsi3 (rtx, rtx, rtx);
10439extern rtx gen_indirect_jump (rtx);
10440extern rtx gen_tablejump (rtx, rtx);
10441extern rtx gen_call (rtx, rtx, rtx);
10442extern rtx gen_sibcall (rtx, rtx, rtx);
10443extern rtx gen_call_pop (rtx, rtx, rtx, rtx);
10444extern rtx gen_call_value (rtx, rtx, rtx, rtx);
10445extern rtx gen_sibcall_value (rtx, rtx, rtx, rtx);
10446extern rtx gen_call_value_pop (rtx, rtx, rtx, rtx, rtx);
10447extern rtx gen_untyped_call (rtx, rtx, rtx);
10448extern rtx gen_memory_blockage (void);
10449extern rtx gen_return (void);
10450extern rtx gen_simple_return (void);
10451extern rtx gen_prologue (void);
10452extern rtx gen_set_got (rtx);
10453extern rtx gen_set_got_labelled (rtx, rtx);
10454extern rtx gen_epilogue (void);
10455extern rtx gen_sibcall_epilogue (void);
10456extern rtx gen_eh_return (rtx);
10457extern rtx gen_split_stack_prologue (void);
10458extern rtx gen_split_stack_space_check (rtx, rtx);
10459extern rtx gen_ffssi2 (rtx, rtx);
10460extern rtx gen_ffsdi2 (rtx, rtx);
10461extern rtx gen_clzsi2 (rtx, rtx);
10462extern rtx gen_clzdi2 (rtx, rtx);
10463extern rtx gen_bmi2_bzhi_si3 (rtx, rtx, rtx);
10464extern rtx gen_bmi2_bzhi_di3 (rtx, rtx, rtx);
10465extern rtx gen_bswapdi2 (rtx, rtx);
10466extern rtx gen_bswapsi2 (rtx, rtx);
10467extern rtx gen_paritydi2 (rtx, rtx);
10468extern rtx gen_paritysi2 (rtx, rtx);
10469extern rtx gen_tls_global_dynamic_32 (rtx, rtx, rtx, rtx);
10470extern rtx gen_tls_global_dynamic_64_si (rtx, rtx, rtx);
10471extern rtx gen_tls_global_dynamic_64_di (rtx, rtx, rtx);
10472extern rtx gen_tls_local_dynamic_base_32 (rtx, rtx, rtx);
10473extern rtx gen_tls_local_dynamic_base_64_si (rtx, rtx);
10474extern rtx gen_tls_local_dynamic_base_64_di (rtx, rtx);
10475extern rtx gen_tls_dynamic_gnu2_32 (rtx, rtx, rtx);
10476extern rtx gen_tls_dynamic_gnu2_64 (rtx, rtx);
10477extern rtx gen_rsqrtsf2 (rtx, rtx);
10478extern rtx gen_sqrtsf2 (rtx, rtx);
10479extern rtx gen_sqrtdf2 (rtx, rtx);
10480extern rtx gen_fmodxf3 (rtx, rtx, rtx);
10481extern rtx gen_fmodsf3 (rtx, rtx, rtx);
10482extern rtx gen_fmoddf3 (rtx, rtx, rtx);
10483extern rtx gen_remainderxf3 (rtx, rtx, rtx);
10484extern rtx gen_remaindersf3 (rtx, rtx, rtx);
10485extern rtx gen_remainderdf3 (rtx, rtx, rtx);
10486extern rtx gen_sincossf3 (rtx, rtx, rtx);
10487extern rtx gen_sincosdf3 (rtx, rtx, rtx);
10488extern rtx gen_tanxf2 (rtx, rtx);
10489extern rtx gen_tansf2 (rtx, rtx);
10490extern rtx gen_tandf2 (rtx, rtx);
10491extern rtx gen_atan2xf3 (rtx, rtx, rtx);
10492extern rtx gen_atan2sf3 (rtx, rtx, rtx);
10493extern rtx gen_atan2df3 (rtx, rtx, rtx);
10494extern rtx gen_atanxf2 (rtx, rtx);
10495extern rtx gen_atansf2 (rtx, rtx);
10496extern rtx gen_atandf2 (rtx, rtx);
10497extern rtx gen_asinxf2 (rtx, rtx);
10498extern rtx gen_asinsf2 (rtx, rtx);
10499extern rtx gen_asindf2 (rtx, rtx);
10500extern rtx gen_acosxf2 (rtx, rtx);
10501extern rtx gen_acossf2 (rtx, rtx);
10502extern rtx gen_acosdf2 (rtx, rtx);
10503extern rtx gen_logxf2 (rtx, rtx);
10504extern rtx gen_logsf2 (rtx, rtx);
10505extern rtx gen_logdf2 (rtx, rtx);
10506extern rtx gen_log10xf2 (rtx, rtx);
10507extern rtx gen_log10sf2 (rtx, rtx);
10508extern rtx gen_log10df2 (rtx, rtx);
10509extern rtx gen_log2xf2 (rtx, rtx);
10510extern rtx gen_log2sf2 (rtx, rtx);
10511extern rtx gen_log2df2 (rtx, rtx);
10512extern rtx gen_log1pxf2 (rtx, rtx);
10513extern rtx gen_log1psf2 (rtx, rtx);
10514extern rtx gen_log1pdf2 (rtx, rtx);
10515extern rtx gen_logbxf2 (rtx, rtx);
10516extern rtx gen_logbsf2 (rtx, rtx);
10517extern rtx gen_logbdf2 (rtx, rtx);
10518extern rtx gen_ilogbxf2 (rtx, rtx);
10519extern rtx gen_ilogbsf2 (rtx, rtx);
10520extern rtx gen_ilogbdf2 (rtx, rtx);
10521extern rtx gen_expNcorexf3 (rtx, rtx, rtx);
10522extern rtx gen_expxf2 (rtx, rtx);
10523extern rtx gen_expsf2 (rtx, rtx);
10524extern rtx gen_expdf2 (rtx, rtx);
10525extern rtx gen_exp10xf2 (rtx, rtx);
10526extern rtx gen_exp10sf2 (rtx, rtx);
10527extern rtx gen_exp10df2 (rtx, rtx);
10528extern rtx gen_exp2xf2 (rtx, rtx);
10529extern rtx gen_exp2sf2 (rtx, rtx);
10530extern rtx gen_exp2df2 (rtx, rtx);
10531extern rtx gen_expm1xf2 (rtx, rtx);
10532extern rtx gen_expm1sf2 (rtx, rtx);
10533extern rtx gen_expm1df2 (rtx, rtx);
10534extern rtx gen_ldexpxf3 (rtx, rtx, rtx);
10535extern rtx gen_ldexpsf3 (rtx, rtx, rtx);
10536extern rtx gen_ldexpdf3 (rtx, rtx, rtx);
10537extern rtx gen_scalbxf3 (rtx, rtx, rtx);
10538extern rtx gen_scalbsf3 (rtx, rtx, rtx);
10539extern rtx gen_scalbdf3 (rtx, rtx, rtx);
10540extern rtx gen_significandxf2 (rtx, rtx);
10541extern rtx gen_significandsf2 (rtx, rtx);
10542extern rtx gen_significanddf2 (rtx, rtx);
10543extern rtx gen_rintsf2 (rtx, rtx);
10544extern rtx gen_rintdf2 (rtx, rtx);
10545extern rtx gen_roundsf2 (rtx, rtx);
10546extern rtx gen_rounddf2 (rtx, rtx);
10547extern rtx gen_roundxf2 (rtx, rtx);
10548extern rtx gen_lrintxfhi2 (rtx, rtx);
10549extern rtx gen_lrintxfsi2 (rtx, rtx);
10550extern rtx gen_lrintxfdi2 (rtx, rtx);
10551extern rtx gen_lrintsfsi2 (rtx, rtx);
10552extern rtx gen_lrintsfdi2 (rtx, rtx);
10553extern rtx gen_lrintdfsi2 (rtx, rtx);
10554extern rtx gen_lrintdfdi2 (rtx, rtx);
10555extern rtx gen_lroundsfhi2 (rtx, rtx);
10556extern rtx gen_lrounddfhi2 (rtx, rtx);
10557extern rtx gen_lroundxfhi2 (rtx, rtx);
10558extern rtx gen_lroundsfsi2 (rtx, rtx);
10559extern rtx gen_lrounddfsi2 (rtx, rtx);
10560extern rtx gen_lroundxfsi2 (rtx, rtx);
10561extern rtx gen_lroundsfdi2 (rtx, rtx);
10562extern rtx gen_lrounddfdi2 (rtx, rtx);
10563extern rtx gen_lroundxfdi2 (rtx, rtx);
10564extern rtx gen_floorxf2 (rtx, rtx);
10565extern rtx gen_ceilxf2 (rtx, rtx);
10566extern rtx gen_btruncxf2 (rtx, rtx);
10567extern rtx gen_floorsf2 (rtx, rtx);
10568extern rtx gen_ceilsf2 (rtx, rtx);
10569extern rtx gen_btruncsf2 (rtx, rtx);
10570extern rtx gen_floordf2 (rtx, rtx);
10571extern rtx gen_ceildf2 (rtx, rtx);
10572extern rtx gen_btruncdf2 (rtx, rtx);
10573extern rtx gen_nearbyintxf2 (rtx, rtx);
10574extern rtx gen_nearbyintsf2 (rtx, rtx);
10575extern rtx gen_nearbyintdf2 (rtx, rtx);
10576extern rtx gen_lfloorxfhi2 (rtx, rtx);
10577extern rtx gen_lceilxfhi2 (rtx, rtx);
10578extern rtx gen_lfloorxfsi2 (rtx, rtx);
10579extern rtx gen_lceilxfsi2 (rtx, rtx);
10580extern rtx gen_lfloorxfdi2 (rtx, rtx);
10581extern rtx gen_lceilxfdi2 (rtx, rtx);
10582extern rtx gen_lfloorsfsi2 (rtx, rtx);
10583extern rtx gen_lceilsfsi2 (rtx, rtx);
10584extern rtx gen_lfloorsfdi2 (rtx, rtx);
10585extern rtx gen_lceilsfdi2 (rtx, rtx);
10586extern rtx gen_lfloordfsi2 (rtx, rtx);
10587extern rtx gen_lceildfsi2 (rtx, rtx);
10588extern rtx gen_lfloordfdi2 (rtx, rtx);
10589extern rtx gen_lceildfdi2 (rtx, rtx);
10590extern rtx gen_isinfxf2 (rtx, rtx);
10591extern rtx gen_isinfsf2 (rtx, rtx);
10592extern rtx gen_isinfdf2 (rtx, rtx);
10593extern rtx gen_signbittf2 (rtx, rtx);
10594extern rtx gen_signbitxf2 (rtx, rtx);
10595extern rtx gen_signbitdf2 (rtx, rtx);
10596extern rtx gen_signbitsf2 (rtx, rtx);
10597extern rtx gen_movmemsi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
10598extern rtx gen_movmemdi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
10599extern rtx gen_strmov (rtx, rtx, rtx, rtx);
10600extern rtx gen_strmov_singleop (rtx, rtx, rtx, rtx, rtx, rtx);
10601extern rtx gen_rep_mov (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
10602extern rtx gen_setmemsi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
10603extern rtx gen_setmemdi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
10604extern rtx gen_strset (rtx, rtx, rtx);
10605extern rtx gen_strset_singleop (rtx, rtx, rtx, rtx);
10606extern rtx gen_rep_stos (rtx, rtx, rtx, rtx, rtx);
10607extern rtx gen_cmpstrnsi (rtx, rtx, rtx, rtx, rtx);
10608extern rtx gen_cmpintqi (rtx);
10609extern rtx gen_cmpstrnqi_nz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
10610extern rtx gen_cmpstrnqi_1 (rtx, rtx, rtx, rtx, rtx, rtx);
10611extern rtx gen_strlensi (rtx, rtx, rtx, rtx);
10612extern rtx gen_strlendi (rtx, rtx, rtx, rtx);
10613extern rtx gen_strlenqi_1 (rtx, rtx, rtx);
10614extern rtx gen_movqicc (rtx, rtx, rtx, rtx);
10615extern rtx gen_movhicc (rtx, rtx, rtx, rtx);
10616extern rtx gen_movsicc (rtx, rtx, rtx, rtx);
10617extern rtx gen_movdicc (rtx, rtx, rtx, rtx);
10618extern rtx gen_x86_movsicc_0_m1 (rtx, rtx, rtx);
10619extern rtx gen_x86_movdicc_0_m1 (rtx, rtx, rtx);
10620extern rtx gen_movsfcc (rtx, rtx, rtx, rtx);
10621extern rtx gen_movdfcc (rtx, rtx, rtx, rtx);
10622extern rtx gen_movxfcc (rtx, rtx, rtx, rtx);
10623extern rtx gen_addqicc (rtx, rtx, rtx, rtx);
10624extern rtx gen_addhicc (rtx, rtx, rtx, rtx);
10625extern rtx gen_addsicc (rtx, rtx, rtx, rtx);
10626extern rtx gen_adddicc (rtx, rtx, rtx, rtx);
10627extern rtx gen_allocate_stack (rtx, rtx);
10628extern rtx gen_probe_stack (rtx);
10629extern rtx gen_builtin_setjmp_receiver (rtx);
10630extern rtx gen_prefetch (rtx, rtx, rtx);
10631extern rtx gen_stack_protect_set (rtx, rtx);
10632extern rtx gen_stack_protect_test (rtx, rtx, rtx);
10633extern rtx gen_lwp_llwpcb (rtx);
10634extern rtx gen_lwp_slwpcb (rtx);
10635extern rtx gen_lwp_lwpvalsi3 (rtx, rtx, rtx, rtx);
10636extern rtx gen_lwp_lwpvaldi3 (rtx, rtx, rtx, rtx);
10637extern rtx gen_lwp_lwpinssi3 (rtx, rtx, rtx, rtx);
10638extern rtx gen_lwp_lwpinsdi3 (rtx, rtx, rtx, rtx);
10639extern rtx gen_pause (void);
10640extern rtx gen_xbegin (rtx);
10641extern rtx gen_xtest (rtx);
10642extern rtx gen_bnd32_mk (rtx, rtx, rtx);
10643extern rtx gen_bnd64_mk (rtx, rtx, rtx);
10644extern rtx gen_movbnd32 (rtx, rtx);
10645extern rtx gen_movbnd64 (rtx, rtx);
10646extern rtx gen_bnd32_cl (rtx, rtx);
10647extern rtx gen_bnd32_cu (rtx, rtx);
10648extern rtx gen_bnd32_cn (rtx, rtx);
10649extern rtx gen_bnd64_cl (rtx, rtx);
10650extern rtx gen_bnd64_cu (rtx, rtx);
10651extern rtx gen_bnd64_cn (rtx, rtx);
10652extern rtx gen_bnd32_ldx (rtx, rtx, rtx);
10653extern rtx gen_bnd64_ldx (rtx, rtx, rtx);
10654extern rtx gen_bnd32_stx (rtx, rtx, rtx);
10655extern rtx gen_bnd64_stx (rtx, rtx, rtx);
10656extern rtx gen_rdpkru (rtx);
10657extern rtx gen_wrpkru (rtx);
10658extern rtx gen_movv8qi (rtx, rtx);
10659extern rtx gen_movv4hi (rtx, rtx);
10660extern rtx gen_movv2si (rtx, rtx);
10661extern rtx gen_movv1di (rtx, rtx);
10662extern rtx gen_movv2sf (rtx, rtx);
10663extern rtx gen_movmisalignv8qi (rtx, rtx);
10664extern rtx gen_movmisalignv4hi (rtx, rtx);
10665extern rtx gen_movmisalignv2si (rtx, rtx);
10666extern rtx gen_movmisalignv1di (rtx, rtx);
10667extern rtx gen_movmisalignv2sf (rtx, rtx);
10668extern rtx gen_mmx_addv2sf3 (rtx, rtx, rtx);
10669extern rtx gen_mmx_subv2sf3 (rtx, rtx, rtx);
10670extern rtx gen_mmx_subrv2sf3 (rtx, rtx, rtx);
10671extern rtx gen_mmx_mulv2sf3 (rtx, rtx, rtx);
10672extern rtx gen_mmx_smaxv2sf3 (rtx, rtx, rtx);
10673extern rtx gen_mmx_sminv2sf3 (rtx, rtx, rtx);
10674extern rtx gen_mmx_eqv2sf3 (rtx, rtx, rtx);
10675extern rtx gen_vec_setv2sf (rtx, rtx, rtx);
10676extern rtx gen_vec_extractv2sf (rtx, rtx, rtx);
10677extern rtx gen_vec_initv2sf (rtx, rtx);
10678extern rtx gen_mmx_addv8qi3 (rtx, rtx, rtx);
10679extern rtx gen_mmx_subv8qi3 (rtx, rtx, rtx);
10680extern rtx gen_mmx_addv4hi3 (rtx, rtx, rtx);
10681extern rtx gen_mmx_subv4hi3 (rtx, rtx, rtx);
10682extern rtx gen_mmx_addv2si3 (rtx, rtx, rtx);
10683extern rtx gen_mmx_subv2si3 (rtx, rtx, rtx);
10684extern rtx gen_mmx_addv1di3 (rtx, rtx, rtx);
10685extern rtx gen_mmx_subv1di3 (rtx, rtx, rtx);
10686extern rtx gen_mmx_ssaddv8qi3 (rtx, rtx, rtx);
10687extern rtx gen_mmx_usaddv8qi3 (rtx, rtx, rtx);
10688extern rtx gen_mmx_sssubv8qi3 (rtx, rtx, rtx);
10689extern rtx gen_mmx_ussubv8qi3 (rtx, rtx, rtx);
10690extern rtx gen_mmx_ssaddv4hi3 (rtx, rtx, rtx);
10691extern rtx gen_mmx_usaddv4hi3 (rtx, rtx, rtx);
10692extern rtx gen_mmx_sssubv4hi3 (rtx, rtx, rtx);
10693extern rtx gen_mmx_ussubv4hi3 (rtx, rtx, rtx);
10694extern rtx gen_mmx_mulv4hi3 (rtx, rtx, rtx);
10695extern rtx gen_mmx_smulv4hi3_highpart (rtx, rtx, rtx);
10696extern rtx gen_mmx_umulv4hi3_highpart (rtx, rtx, rtx);
10697extern rtx gen_mmx_pmaddwd (rtx, rtx, rtx);
10698extern rtx gen_mmx_pmulhrwv4hi3 (rtx, rtx, rtx);
10699extern rtx gen_sse2_umulv1siv1di3 (rtx, rtx, rtx);
10700extern rtx gen_mmx_smaxv4hi3 (rtx, rtx, rtx);
10701extern rtx gen_mmx_sminv4hi3 (rtx, rtx, rtx);
10702extern rtx gen_mmx_umaxv8qi3 (rtx, rtx, rtx);
10703extern rtx gen_mmx_uminv8qi3 (rtx, rtx, rtx);
10704extern rtx gen_mmx_eqv8qi3 (rtx, rtx, rtx);
10705extern rtx gen_mmx_eqv4hi3 (rtx, rtx, rtx);
10706extern rtx gen_mmx_eqv2si3 (rtx, rtx, rtx);
10707extern rtx gen_mmx_andv8qi3 (rtx, rtx, rtx);
10708extern rtx gen_mmx_iorv8qi3 (rtx, rtx, rtx);
10709extern rtx gen_mmx_xorv8qi3 (rtx, rtx, rtx);
10710extern rtx gen_mmx_andv4hi3 (rtx, rtx, rtx);
10711extern rtx gen_mmx_iorv4hi3 (rtx, rtx, rtx);
10712extern rtx gen_mmx_xorv4hi3 (rtx, rtx, rtx);
10713extern rtx gen_mmx_andv2si3 (rtx, rtx, rtx);
10714extern rtx gen_mmx_iorv2si3 (rtx, rtx, rtx);
10715extern rtx gen_mmx_xorv2si3 (rtx, rtx, rtx);
10716extern rtx gen_mmx_pinsrw (rtx, rtx, rtx, rtx);
10717extern rtx gen_mmx_pshufw (rtx, rtx, rtx);
10718extern rtx gen_vec_setv2si (rtx, rtx, rtx);
10719extern rtx gen_vec_extractv2si (rtx, rtx, rtx);
10720extern rtx gen_vec_initv2si (rtx, rtx);
10721extern rtx gen_vec_setv4hi (rtx, rtx, rtx);
10722extern rtx gen_vec_extractv4hi (rtx, rtx, rtx);
10723extern rtx gen_vec_initv4hi (rtx, rtx);
10724extern rtx gen_vec_setv8qi (rtx, rtx, rtx);
10725extern rtx gen_vec_extractv8qi (rtx, rtx, rtx);
10726extern rtx gen_vec_initv8qi (rtx, rtx);
10727extern rtx gen_mmx_uavgv8qi3 (rtx, rtx, rtx);
10728extern rtx gen_mmx_uavgv4hi3 (rtx, rtx, rtx);
10729extern rtx gen_mmx_maskmovq (rtx, rtx, rtx);
10730extern rtx gen_mmx_emms (void);
10731extern rtx gen_mmx_femms (void);
10732extern rtx gen_movv64qi (rtx, rtx);
10733extern rtx gen_movv32qi (rtx, rtx);
10734extern rtx gen_movv16qi (rtx, rtx);
10735extern rtx gen_movv32hi (rtx, rtx);
10736extern rtx gen_movv16hi (rtx, rtx);
10737extern rtx gen_movv8hi (rtx, rtx);
10738extern rtx gen_movv16si (rtx, rtx);
10739extern rtx gen_movv8si (rtx, rtx);
10740extern rtx gen_movv4si (rtx, rtx);
10741extern rtx gen_movv8di (rtx, rtx);
10742extern rtx gen_movv4di (rtx, rtx);
10743extern rtx gen_movv2di (rtx, rtx);
10744extern rtx gen_movv4ti (rtx, rtx);
10745extern rtx gen_movv2ti (rtx, rtx);
10746extern rtx gen_movv1ti (rtx, rtx);
10747extern rtx gen_movv16sf (rtx, rtx);
10748extern rtx gen_movv8sf (rtx, rtx);
10749extern rtx gen_movv4sf (rtx, rtx);
10750extern rtx gen_movv8df (rtx, rtx);
10751extern rtx gen_movv4df (rtx, rtx);
10752extern rtx gen_movv2df (rtx, rtx);
10753extern rtx gen_movmisalignv64qi (rtx, rtx);
10754extern rtx gen_movmisalignv32qi (rtx, rtx);
10755extern rtx gen_movmisalignv16qi (rtx, rtx);
10756extern rtx gen_movmisalignv32hi (rtx, rtx);
10757extern rtx gen_movmisalignv16hi (rtx, rtx);
10758extern rtx gen_movmisalignv8hi (rtx, rtx);
10759extern rtx gen_movmisalignv16si (rtx, rtx);
10760extern rtx gen_movmisalignv8si (rtx, rtx);
10761extern rtx gen_movmisalignv4si (rtx, rtx);
10762extern rtx gen_movmisalignv8di (rtx, rtx);
10763extern rtx gen_movmisalignv4di (rtx, rtx);
10764extern rtx gen_movmisalignv2di (rtx, rtx);
10765extern rtx gen_movmisalignv4ti (rtx, rtx);
10766extern rtx gen_movmisalignv2ti (rtx, rtx);
10767extern rtx gen_movmisalignv1ti (rtx, rtx);
10768extern rtx gen_movmisalignv16sf (rtx, rtx);
10769extern rtx gen_movmisalignv8sf (rtx, rtx);
10770extern rtx gen_movmisalignv4sf (rtx, rtx);
10771extern rtx gen_movmisalignv8df (rtx, rtx);
10772extern rtx gen_movmisalignv4df (rtx, rtx);
10773extern rtx gen_movmisalignv2df (rtx, rtx);
10774extern rtx gen_storentdi (rtx, rtx);
10775extern rtx gen_storentsi (rtx, rtx);
10776extern rtx gen_storentsf (rtx, rtx);
10777extern rtx gen_storentdf (rtx, rtx);
10778extern rtx gen_storentv8di (rtx, rtx);
10779extern rtx gen_storentv4di (rtx, rtx);
10780extern rtx gen_storentv2di (rtx, rtx);
10781extern rtx gen_storentv16sf (rtx, rtx);
10782extern rtx gen_storentv8sf (rtx, rtx);
10783extern rtx gen_storentv4sf (rtx, rtx);
10784extern rtx gen_storentv8df (rtx, rtx);
10785extern rtx gen_storentv4df (rtx, rtx);
10786extern rtx gen_storentv2df (rtx, rtx);
10787extern rtx gen_kmovb (rtx, rtx);
10788extern rtx gen_kmovw (rtx, rtx);
10789extern rtx gen_kmovd (rtx, rtx);
10790extern rtx gen_kmovq (rtx, rtx);
10791extern rtx gen_absv16sf2 (rtx, rtx);
10792extern rtx gen_negv16sf2 (rtx, rtx);
10793extern rtx gen_absv8sf2 (rtx, rtx);
10794extern rtx gen_negv8sf2 (rtx, rtx);
10795extern rtx gen_absv4sf2 (rtx, rtx);
10796extern rtx gen_negv4sf2 (rtx, rtx);
10797extern rtx gen_absv8df2 (rtx, rtx);
10798extern rtx gen_negv8df2 (rtx, rtx);
10799extern rtx gen_absv4df2 (rtx, rtx);
10800extern rtx gen_negv4df2 (rtx, rtx);
10801extern rtx gen_absv2df2 (rtx, rtx);
10802extern rtx gen_negv2df2 (rtx, rtx);
/* Vector float add/sub expander declarations for V16SF/V8SF/V4SF.
   Only the 512-bit (V16SF) "_round" and "_mask_round" variants have
   real expanders; for the 256/128-bit modes the generator emits
   static inline stubs that always return 0 (a null rtx), presumably
   because the matching insn pattern's condition is statically false
   in this configuration -- TODO confirm against genflags output.
   NOTE(review): this is a generated file; change the machine
   description and regenerate rather than hand-editing.  */
extern rtx gen_addv16sf3 (rtx, rtx, rtx);
extern rtx gen_addv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_addv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv16sf3 (rtx, rtx, rtx);
extern rtx gen_subv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_subv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv8sf3 (rtx, rtx, rtx);
static inline rtx gen_addv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_addv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_addv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_addv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_addv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_subv8sf3 (rtx, rtx, rtx);
static inline rtx gen_subv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_subv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_subv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_subv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_subv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_addv4sf3 (rtx, rtx, rtx);
static inline rtx gen_addv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_addv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_addv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_addv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_addv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_subv4sf3 (rtx, rtx, rtx);
static inline rtx gen_subv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_subv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_subv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_subv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_subv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
/* Vector double add/sub expander declarations for V8DF/V4DF/V2DF.
   Only the 512-bit (V8DF) "_round" and "_mask_round" variants have
   real expanders; for V4DF/V2DF the generator emits static inline
   stubs that always return 0 (a null rtx), presumably because the
   matching insn pattern's condition is statically false in this
   configuration -- TODO confirm against genflags output.
   NOTE(review): generated file; do not hand-edit.  */
extern rtx gen_addv8df3 (rtx, rtx, rtx);
extern rtx gen_addv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_addv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv8df3 (rtx, rtx, rtx);
extern rtx gen_subv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_subv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv4df3 (rtx, rtx, rtx);
static inline rtx gen_addv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_addv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_addv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_addv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_addv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_subv4df3 (rtx, rtx, rtx);
static inline rtx gen_subv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_subv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_subv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_subv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_subv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_addv2df3 (rtx, rtx, rtx);
static inline rtx gen_addv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_addv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_addv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_addv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_addv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_subv2df3 (rtx, rtx, rtx);
static inline rtx gen_subv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_subv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_subv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_subv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_subv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
/* Vector float multiply expanders for V16SF/V8SF/V4SF.  As with the
   add/sub group, only the 512-bit (V16SF) "_round"/"_mask_round"
   variants are real expanders; the V8SF/V4SF ones are generated
   static inline stubs that always return 0 (null rtx), presumably
   because their pattern condition is statically false here -- TODO
   confirm.  NOTE(review): generated file; do not hand-edit.  */
extern rtx gen_mulv16sf3 (rtx, rtx, rtx);
extern rtx gen_mulv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_mulv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv8sf3 (rtx, rtx, rtx);
static inline rtx gen_mulv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_mulv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_mulv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_mulv4sf3 (rtx, rtx, rtx);
static inline rtx gen_mulv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_mulv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_mulv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
/* Vector double multiply expanders for V8DF/V4DF/V2DF.  Only the
   512-bit (V8DF) "_round"/"_mask_round" variants are real expanders;
   the V4DF/V2DF ones are generated static inline stubs that always
   return 0 (null rtx), presumably because their pattern condition is
   statically false here -- TODO confirm.  NOTE(review): generated
   file; do not hand-edit.  */
extern rtx gen_mulv8df3 (rtx, rtx, rtx);
extern rtx gen_mulv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_mulv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv4df3 (rtx, rtx, rtx);
static inline rtx gen_mulv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_mulv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_mulv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_mulv2df3 (rtx, rtx, rtx);
static inline rtx gen_mulv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_mulv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_mulv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
/* Vector divide, sqrt and reciprocal-sqrt expanders.  Every mode in
   this group has a real (extern) expander, so no inline stubs are
   emitted here.  */
extern rtx gen_divv8df3 (rtx, rtx, rtx);
extern rtx gen_divv4df3 (rtx, rtx, rtx);
extern rtx gen_divv2df3 (rtx, rtx, rtx);
extern rtx gen_divv16sf3 (rtx, rtx, rtx);
extern rtx gen_divv8sf3 (rtx, rtx, rtx);
extern rtx gen_divv4sf3 (rtx, rtx, rtx);
extern rtx gen_sqrtv8df2 (rtx, rtx);
extern rtx gen_sqrtv4df2 (rtx, rtx);
extern rtx gen_sqrtv2df2 (rtx, rtx);
extern rtx gen_sqrtv16sf2 (rtx, rtx);
extern rtx gen_sqrtv8sf2 (rtx, rtx);
extern rtx gen_sqrtv4sf2 (rtx, rtx);
extern rtx gen_rsqrtv8sf2 (rtx, rtx);
extern rtx gen_rsqrtv4sf2 (rtx, rtx);
extern rtx gen_rsqrtv16sf2 (rtx, rtx);
/* Vector float smax/smin expanders for V16SF/V8SF/V4SF.  Only the
   512-bit (V16SF) "_round"/"_mask_round" variants are real expanders;
   the V8SF/V4SF ones are generated static inline stubs that always
   return 0 (null rtx), presumably because their pattern condition is
   statically false here -- TODO confirm.  NOTE(review): generated
   file; do not hand-edit.  */
extern rtx gen_smaxv16sf3 (rtx, rtx, rtx);
extern rtx gen_smaxv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_smaxv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv16sf3 (rtx, rtx, rtx);
extern rtx gen_sminv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sminv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv8sf3 (rtx, rtx, rtx);
static inline rtx gen_smaxv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_smaxv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_smaxv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_sminv8sf3 (rtx, rtx, rtx);
static inline rtx gen_sminv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_sminv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_sminv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_smaxv4sf3 (rtx, rtx, rtx);
static inline rtx gen_smaxv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_smaxv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_smaxv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_sminv4sf3 (rtx, rtx, rtx);
static inline rtx gen_sminv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_sminv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_sminv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
/* Vector double smax/smin expanders for V8DF/V4DF/V2DF.  Only the
   512-bit (V8DF) "_round"/"_mask_round" variants are real expanders;
   the V4DF/V2DF ones are generated static inline stubs that always
   return 0 (null rtx), presumably because their pattern condition is
   statically false here -- TODO confirm.  NOTE(review): generated
   file; do not hand-edit.  */
extern rtx gen_smaxv8df3 (rtx, rtx, rtx);
extern rtx gen_smaxv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_smaxv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv8df3 (rtx, rtx, rtx);
extern rtx gen_sminv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sminv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv4df3 (rtx, rtx, rtx);
static inline rtx gen_smaxv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_smaxv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_smaxv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_sminv4df3 (rtx, rtx, rtx);
static inline rtx gen_sminv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_sminv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_sminv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_smaxv2df3 (rtx, rtx, rtx);
static inline rtx gen_smaxv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_smaxv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_smaxv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
extern rtx gen_sminv2df3 (rtx, rtx, rtx);
static inline rtx gen_sminv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
 return 0;
}
extern rtx gen_sminv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_sminv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
 return 0;
}
/* Horizontal-add and vector-reduction expanders: reduc_plus (float
   modes) plus signed/unsigned max/min reductions to a scalar for the
   SSE/AVX/AVX-512 vector modes.  All take (scalar-dest, vector-src),
   except the three-operand sse3_haddv2df3.  Generated declarations —
   do not edit by hand.  */
extern rtx gen_sse3_haddv2df3 (rtx, rtx, rtx);
extern rtx gen_reduc_plus_scal_v8df (rtx, rtx);
extern rtx gen_reduc_plus_scal_v4df (rtx, rtx);
extern rtx gen_reduc_plus_scal_v2df (rtx, rtx);
extern rtx gen_reduc_plus_scal_v16sf (rtx, rtx);
extern rtx gen_reduc_plus_scal_v8sf (rtx, rtx);
extern rtx gen_reduc_plus_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v32qi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v32qi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v16hi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v16hi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8si (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8si (rtx, rtx);
extern rtx gen_reduc_smax_scal_v4di (rtx, rtx);
extern rtx gen_reduc_smin_scal_v4di (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8sf (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v4df (rtx, rtx);
extern rtx gen_reduc_smin_scal_v4df (rtx, rtx);
extern rtx gen_reduc_smax_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smin_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v64qi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v64qi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v32hi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v32hi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v16si (rtx, rtx);
extern rtx gen_reduc_smin_scal_v16si (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8di (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8di (rtx, rtx);
extern rtx gen_reduc_smax_scal_v16sf (rtx, rtx);
extern rtx gen_reduc_smin_scal_v16sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8df (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8df (rtx, rtx);
/* Unsigned max/min reductions.  */
extern rtx gen_reduc_umax_scal_v16si (rtx, rtx);
extern rtx gen_reduc_umin_scal_v16si (rtx, rtx);
extern rtx gen_reduc_umax_scal_v8di (rtx, rtx);
extern rtx gen_reduc_umin_scal_v8di (rtx, rtx);
extern rtx gen_reduc_umax_scal_v32hi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v32hi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v64qi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v64qi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v32qi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v32qi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v16hi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v16hi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v8si (rtx, rtx);
extern rtx gen_reduc_umin_scal_v8si (rtx, rtx);
extern rtx gen_reduc_umax_scal_v4di (rtx, rtx);
extern rtx gen_reduc_umin_scal_v4di (rtx, rtx);
extern rtx gen_reduc_umin_scal_v8hi (rtx, rtx);
/* Vector comparison expanders (dest, cmp-operator, op1, op2).  The
   vec_cmp* names encode source-vector mode followed by result mode;
   scalar result modes (qi/hi/si/di) presumably correspond to AVX-512
   mask registers — TODO confirm against the md file.  The vec_cmpu*
   group is the unsigned variant.  */
extern rtx gen_vec_cmpv16sihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16sfhi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8sfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4sfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv64qidi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16qihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv32qisi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv32hisi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16hihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8hiqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv32qiv32qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16hiv16hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8siv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4div4di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2div2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8sfv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4dfv4di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4sfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2dfv2di (rtx, rtx, rtx, rtx);
/* Unsigned comparisons.  */
extern rtx gen_vec_cmpuv16sihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv4siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv4diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv2diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv64qidi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv16qihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv32qisi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv32hisi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv16hihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8hiqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv32qiv32qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv16hiv16hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8siv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv4div4di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv2div2di (rtx, rtx, rtx, rtx);
/* Equality-only comparison.  */
extern rtx gen_vec_cmpeqv2div2di (rtx, rtx, rtx, rtx);
/* Six-operand vcond (vector conditional select) expanders.  The name
   encodes the data-vector mode then the comparison-vector mode.
   Operand order follows the standard vcond pattern (dest, true-op,
   false-op, comparison code, cmp-op1, cmp-op2) — presumably; verify
   against the md file before relying on it.  */
extern rtx gen_vcondv64qiv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32hiv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16siv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8div16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16sfv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8dfv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv64qiv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32hiv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16siv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8div8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16sfv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8dfv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32qiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32qiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16hiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16hiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8siv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8siv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4div8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4div4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8sfv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8sfv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4dfv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4dfv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16qiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16qiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8hiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8hiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4siv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4siv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2div4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2div2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4sfv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4sfv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2dfv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2dfv2df (rtx, rtx, rtx, rtx, rtx, rtx);
/* Four-operand vcond_mask (select under a precomputed mask) expanders.
   The name encodes the data-vector mode then the mask mode; scalar
   mask modes (hi/qi/si/di) presumably map to AVX-512 mask registers —
   TODO confirm against the md file.  */
extern rtx gen_vcond_mask_v16sihi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16sfhi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8sfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4sfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v64qidi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16qihi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v32qisi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v32hisi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16hihi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8hiqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v32qiv32qi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16hiv16hi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8siv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4div4di (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2div2di (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8sfv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4dfv4di (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4sfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2dfv2di (rtx, rtx, rtx, rtx);
/* Bitwise and/ior/xor on floating-point vector modes (plain and
   5-operand masked forms), copysign expanders, and TFmode logical
   ops.  */
extern rtx gen_andv8sf3 (rtx, rtx, rtx);
extern rtx gen_andv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv8sf3 (rtx, rtx, rtx);
extern rtx gen_iorv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv8sf3 (rtx, rtx, rtx);
extern rtx gen_xorv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv4sf3 (rtx, rtx, rtx);
extern rtx gen_andv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv4sf3 (rtx, rtx, rtx);
extern rtx gen_iorv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv4sf3 (rtx, rtx, rtx);
extern rtx gen_xorv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv4df3 (rtx, rtx, rtx);
extern rtx gen_andv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv4df3 (rtx, rtx, rtx);
extern rtx gen_iorv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv4df3 (rtx, rtx, rtx);
extern rtx gen_xorv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv2df3 (rtx, rtx, rtx);
extern rtx gen_andv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv2df3 (rtx, rtx, rtx);
extern rtx gen_iorv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv2df3 (rtx, rtx, rtx);
extern rtx gen_xorv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv16sf3 (rtx, rtx, rtx);
extern rtx gen_andv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv16sf3 (rtx, rtx, rtx);
extern rtx gen_iorv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv16sf3 (rtx, rtx, rtx);
extern rtx gen_xorv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv8df3 (rtx, rtx, rtx);
extern rtx gen_andv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv8df3 (rtx, rtx, rtx);
extern rtx gen_iorv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv8df3 (rtx, rtx, rtx);
extern rtx gen_xorv8df3_mask (rtx, rtx, rtx, rtx, rtx);
/* copysign for SF/DF vector modes.  */
extern rtx gen_copysignv16sf3 (rtx, rtx, rtx);
extern rtx gen_copysignv8sf3 (rtx, rtx, rtx);
extern rtx gen_copysignv4sf3 (rtx, rtx, rtx);
extern rtx gen_copysignv8df3 (rtx, rtx, rtx);
extern rtx gen_copysignv4df3 (rtx, rtx, rtx);
extern rtx gen_copysignv2df3 (rtx, rtx, rtx);
/* TFmode (128-bit) logical operations.  */
extern rtx gen_andtf3 (rtx, rtx, rtx);
extern rtx gen_iortf3 (rtx, rtx, rtx);
extern rtx gen_xortf3 (rtx, rtx, rtx);
/* Fused multiply-add family: fma/fms/fnma/fnms for scalar and vector
   SF/DF modes, FMA4 forms, AVX-512 zero-masked (_maskz) and rounding
   (_round) variants, fmaddsub, and scalar (vm) merge forms.  */
extern rtx gen_fmasf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmadf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav8sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav4df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav16sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav8df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmssf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsdf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv8sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv4df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv16sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv8df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmasf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmadf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav8sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav4df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav16sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav8df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmssf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsdf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv8sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv4df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv16sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv8df4 (rtx, rtx, rtx, rtx);
/* FMA4-style intrinsic expanders.  */
extern rtx gen_fma4i_fmadd_sf (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_df (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v2df (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v8sf (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v4df (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v16sf (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v8df (rtx, rtx, rtx, rtx);
/* AVX-512 zero-masked fmadd, with and without rounding operand.  */
extern rtx gen_avx512f_fmadd_v16sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v16sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v8sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v8sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v8df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v8df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v2df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v2df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Alternating add/subtract FMA.  */
extern rtx gen_fmaddsub_v16sf (rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v8sf (rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v8df (rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v4df (rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v2df (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v16sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v16sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v8sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v8sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v8df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v8df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v2df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v2df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
/* Scalar-in-vector (vm) fmadd forms.  */
extern rtx gen_fmai_vmfmadd_v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_fmai_vmfmadd_v4sf_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fmai_vmfmadd_v2df (rtx, rtx, rtx, rtx);
extern rtx gen_fmai_vmfmadd_v2df_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_vmfmadd_v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_vmfmadd_v2df (rtx, rtx, rtx, rtx);
/* Conversion expanders: unsigned int<->float, pd<->dq/ps conversions,
   AVX-512 mask-to-vector moves, vec_unpack widening conversions, and
   vec_pack narrowing conversions.  */
extern rtx gen_floatunsv16siv16sf2 (rtx, rtx);
extern rtx gen_floatunsv8siv8sf2 (rtx, rtx);
extern rtx gen_floatunsv4siv4sf2 (rtx, rtx);
extern rtx gen_fixuns_truncv16sfv16si2 (rtx, rtx);
extern rtx gen_fixuns_truncv8sfv8si2 (rtx, rtx);
extern rtx gen_fixuns_truncv4sfv4si2 (rtx, rtx);
extern rtx gen_avx_cvtpd2dq256_2 (rtx, rtx);
extern rtx gen_avx_cvttpd2dq256_2 (rtx, rtx);
extern rtx gen_sse2_cvtpd2ps (rtx, rtx);
extern rtx gen_sse2_cvtpd2ps_mask (rtx, rtx, rtx, rtx);
/* AVX-512 mask-register to vector broadcasts.  */
extern rtx gen_avx512bw_cvtmask2bv64qi (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2bv16qi (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2bv32qi (rtx, rtx);
extern rtx gen_avx512bw_cvtmask2wv32hi (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2wv16hi (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2wv8hi (rtx, rtx);
extern rtx gen_avx512f_cvtmask2dv16si (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2dv8si (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2dv4si (rtx, rtx);
extern rtx gen_avx512f_cvtmask2qv8di (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2qv4di (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2qv2di (rtx, rtx);
/* Widening unpack / int-to-float conversions.  */
extern rtx gen_vec_unpacks_hi_v4sf (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v8sf (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v16sf (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v4sf (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v8sf (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v32hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v16hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v8hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v32hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v16hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v8hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v32hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v16hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v8hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v32hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v16hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v8hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v4si (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v4si (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v8si (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v8si (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v16si (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v16si (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v4si (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v4si (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v8si (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v16si (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v8si (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v16si (rtx, rtx);
/* Narrowing pack conversions (dest, src-hi, src-lo).  */
extern rtx gen_vec_pack_trunc_v8df (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v4df (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v2df (rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_trunc_v8df (rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_trunc_v4df (rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_trunc_v2df (rtx, rtx, rtx);
extern rtx gen_vec_pack_ufix_trunc_v8df (rtx, rtx, rtx);
extern rtx gen_vec_pack_ufix_trunc_v4df (rtx, rtx, rtx);
extern rtx gen_vec_pack_ufix_trunc_v2df (rtx, rtx, rtx);
extern rtx gen_avx512f_vec_pack_sfix_v8df (rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_v4df (rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_v2df (rtx, rtx, rtx);
/* Data-movement expanders: mov{hl,lh}ps, interleave, shufps (plain and
   masked), load{h,l}ps, vec_init / vec_set / vec_extract for all
   vector modes, and the AVX/AVX-512 vextract lane-extraction forms.  */
extern rtx gen_sse_movhlps_exp (rtx, rtx, rtx);
extern rtx gen_sse_movlhps_exp (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv8sf (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv8sf (rtx, rtx, rtx);
extern rtx gen_avx_shufps256 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_shufps256_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse_shufps (rtx, rtx, rtx, rtx);
extern rtx gen_sse_shufps_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse_loadhps_exp (rtx, rtx, rtx);
extern rtx gen_sse_loadlps_exp (rtx, rtx, rtx);
/* Build a vector from elements (dest, parallel of elements).  */
extern rtx gen_vec_initv16qi (rtx, rtx);
extern rtx gen_vec_initv8hi (rtx, rtx);
extern rtx gen_vec_initv4si (rtx, rtx);
extern rtx gen_vec_initv2di (rtx, rtx);
extern rtx gen_vec_initv4sf (rtx, rtx);
extern rtx gen_vec_initv2df (rtx, rtx);
/* Set one element of a vector.  */
extern rtx gen_vec_setv32qi (rtx, rtx, rtx);
extern rtx gen_vec_setv16qi (rtx, rtx, rtx);
extern rtx gen_vec_setv16hi (rtx, rtx, rtx);
extern rtx gen_vec_setv8hi (rtx, rtx, rtx);
extern rtx gen_vec_setv16si (rtx, rtx, rtx);
extern rtx gen_vec_setv8si (rtx, rtx, rtx);
extern rtx gen_vec_setv4si (rtx, rtx, rtx);
extern rtx gen_vec_setv8di (rtx, rtx, rtx);
extern rtx gen_vec_setv4di (rtx, rtx, rtx);
extern rtx gen_vec_setv2di (rtx, rtx, rtx);
extern rtx gen_vec_setv16sf (rtx, rtx, rtx);
extern rtx gen_vec_setv8sf (rtx, rtx, rtx);
extern rtx gen_vec_setv4sf (rtx, rtx, rtx);
extern rtx gen_vec_setv8df (rtx, rtx, rtx);
extern rtx gen_vec_setv4df (rtx, rtx, rtx);
extern rtx gen_vec_setv2df (rtx, rtx, rtx);
/* Masked AVX-512 subvector extraction.  */
extern rtx gen_avx512dq_vextractf64x2_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vextracti64x2_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextractf32x4_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextracti32x4_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vextractf32x8_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vextracti32x8_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextractf64x4_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextracti64x4_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vextractf128v8si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vextractf128v8sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vextractf128v4di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vextractf128v4df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v32qi (rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v16hi (rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v8si (rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v4di (rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v8sf (rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v4df (rtx, rtx, rtx);
/* Extract one element of a vector.  */
extern rtx gen_vec_extractv64qi (rtx, rtx, rtx);
extern rtx gen_vec_extractv32qi (rtx, rtx, rtx);
extern rtx gen_vec_extractv16qi (rtx, rtx, rtx);
extern rtx gen_vec_extractv32hi (rtx, rtx, rtx);
extern rtx gen_vec_extractv16hi (rtx, rtx, rtx);
extern rtx gen_vec_extractv8hi (rtx, rtx, rtx);
extern rtx gen_vec_extractv16si (rtx, rtx, rtx);
extern rtx gen_vec_extractv8si (rtx, rtx, rtx);
extern rtx gen_vec_extractv4si (rtx, rtx, rtx);
extern rtx gen_vec_extractv8di (rtx, rtx, rtx);
extern rtx gen_vec_extractv4di (rtx, rtx, rtx);
extern rtx gen_vec_extractv2di (rtx, rtx, rtx);
extern rtx gen_vec_extractv16sf (rtx, rtx, rtx);
extern rtx gen_vec_extractv8sf (rtx, rtx, rtx);
extern rtx gen_vec_extractv4sf (rtx, rtx, rtx);
extern rtx gen_vec_extractv8df (rtx, rtx, rtx);
extern rtx gen_vec_extractv4df (rtx, rtx, rtx);
extern rtx gen_vec_extractv2df (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv4df (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv2df (rtx, rtx, rtx);
/* DF-mode permutation and fixup expanders: movddup/unpcklpd (plain and
   masked), interleave-low, AVX-512 vternlog and fixupimm zero-masked
   forms (with rounding variants), shufpd, and loadh/loadl.  */
extern rtx gen_avx512f_movddup512 (rtx, rtx);
extern rtx gen_avx512f_movddup512_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_unpcklpd512 (rtx, rtx, rtx);
extern rtx gen_avx512f_unpcklpd512_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_movddup256 (rtx, rtx);
extern rtx gen_avx_movddup256_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx_unpcklpd256 (rtx, rtx, rtx);
extern rtx gen_avx_unpcklpd256_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv4df (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv2df (rtx, rtx, rtx);
/* Ternary logic with zero-masking.  */
extern rtx gen_avx512f_vternlogv16si_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv8si_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv4si_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vternlogv8di_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv4di_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv2di_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shufps512_mask (rtx, rtx, rtx, rtx, rtx, rtx);
/* fixupimm with zero-masking; _round forms take one extra operand.  */
extern rtx gen_avx512f_fixupimmv16sf_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv16sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv8sf_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv8sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4sf_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv8df_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv8df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4df_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv2df_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv2df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv4sf_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv4sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv2df_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv2df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shufpd512_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_shufpd256 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_shufpd256_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_shufpd (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_shufpd_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_loadhpd_exp (rtx, rtx, rtx);
extern rtx gen_sse2_loadlpd_exp (rtx, rtx, rtx);
/* AVX-512 masked truncating stores: plain, signed-saturating (ss) and
   unsigned-saturating (us) narrowing from the wider to the narrower
   vector mode in the name, stored under a mask.  */
extern rtx gen_avx512f_ss_truncatev16siv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev16siv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev16siv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev16siv16hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev16siv16hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev16siv16hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev8div8si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev8div8si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev8div8si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev8div8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev8div8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev8div8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512bw_ss_truncatev32hiv32qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512bw_truncatev32hiv32qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512bw_us_truncatev32hiv32qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4div4si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4div4si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4div4si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev8siv8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev8siv8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev8siv8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev16hiv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev16hiv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev16hiv16qi2_mask_store (rtx, rtx, rtx);
/* Integer vector arithmetic: negation, add/sub (plain and 5-operand
   masked forms), and the start of the saturating-add group.  */
extern rtx gen_negv64qi2 (rtx, rtx);
extern rtx gen_negv32qi2 (rtx, rtx);
extern rtx gen_negv16qi2 (rtx, rtx);
extern rtx gen_negv32hi2 (rtx, rtx);
extern rtx gen_negv16hi2 (rtx, rtx);
extern rtx gen_negv8hi2 (rtx, rtx);
extern rtx gen_negv16si2 (rtx, rtx);
extern rtx gen_negv8si2 (rtx, rtx);
extern rtx gen_negv4si2 (rtx, rtx);
extern rtx gen_negv8di2 (rtx, rtx);
extern rtx gen_negv4di2 (rtx, rtx);
extern rtx gen_negv2di2 (rtx, rtx);
/* Plain addition/subtraction.  */
extern rtx gen_addv64qi3 (rtx, rtx, rtx);
extern rtx gen_subv64qi3 (rtx, rtx, rtx);
extern rtx gen_addv32qi3 (rtx, rtx, rtx);
extern rtx gen_subv32qi3 (rtx, rtx, rtx);
extern rtx gen_addv16qi3 (rtx, rtx, rtx);
extern rtx gen_subv16qi3 (rtx, rtx, rtx);
extern rtx gen_addv32hi3 (rtx, rtx, rtx);
extern rtx gen_subv32hi3 (rtx, rtx, rtx);
extern rtx gen_addv16hi3 (rtx, rtx, rtx);
extern rtx gen_subv16hi3 (rtx, rtx, rtx);
extern rtx gen_addv8hi3 (rtx, rtx, rtx);
extern rtx gen_subv8hi3 (rtx, rtx, rtx);
extern rtx gen_addv16si3 (rtx, rtx, rtx);
extern rtx gen_subv16si3 (rtx, rtx, rtx);
extern rtx gen_addv8si3 (rtx, rtx, rtx);
extern rtx gen_subv8si3 (rtx, rtx, rtx);
extern rtx gen_addv4si3 (rtx, rtx, rtx);
extern rtx gen_subv4si3 (rtx, rtx, rtx);
extern rtx gen_addv8di3 (rtx, rtx, rtx);
extern rtx gen_subv8di3 (rtx, rtx, rtx);
extern rtx gen_addv4di3 (rtx, rtx, rtx);
extern rtx gen_subv4di3 (rtx, rtx, rtx);
extern rtx gen_addv2di3 (rtx, rtx, rtx);
extern rtx gen_subv2di3 (rtx, rtx, rtx);
/* Masked addition/subtraction.  */
extern rtx gen_addv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ssaddv64qi3 (rtx, rtx, rtx);
11684extern rtx gen_avx512bw_ssaddv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
11685extern rtx gen_avx512bw_usaddv64qi3 (rtx, rtx, rtx);
11686extern rtx gen_avx512bw_usaddv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
11687extern rtx gen_avx512bw_sssubv64qi3 (rtx, rtx, rtx);
11688extern rtx gen_avx512bw_sssubv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
11689extern rtx gen_avx512bw_ussubv64qi3 (rtx, rtx, rtx);
11690extern rtx gen_avx512bw_ussubv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
11691extern rtx gen_avx2_ssaddv32qi3 (rtx, rtx, rtx);
11692extern rtx gen_avx2_ssaddv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
11693extern rtx gen_avx2_usaddv32qi3 (rtx, rtx, rtx);
11694extern rtx gen_avx2_usaddv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
11695extern rtx gen_avx2_sssubv32qi3 (rtx, rtx, rtx);
11696extern rtx gen_avx2_sssubv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
11697extern rtx gen_avx2_ussubv32qi3 (rtx, rtx, rtx);
11698extern rtx gen_avx2_ussubv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
11699extern rtx gen_sse2_ssaddv16qi3 (rtx, rtx, rtx);
11700extern rtx gen_sse2_ssaddv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
11701extern rtx gen_sse2_usaddv16qi3 (rtx, rtx, rtx);
11702extern rtx gen_sse2_usaddv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
11703extern rtx gen_sse2_sssubv16qi3 (rtx, rtx, rtx);
11704extern rtx gen_sse2_sssubv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
11705extern rtx gen_sse2_ussubv16qi3 (rtx, rtx, rtx);
11706extern rtx gen_sse2_ussubv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
11707extern rtx gen_avx512bw_ssaddv32hi3 (rtx, rtx, rtx);
11708extern rtx gen_avx512bw_ssaddv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
11709extern rtx gen_avx512bw_usaddv32hi3 (rtx, rtx, rtx);
11710extern rtx gen_avx512bw_usaddv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
11711extern rtx gen_avx512bw_sssubv32hi3 (rtx, rtx, rtx);
11712extern rtx gen_avx512bw_sssubv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
11713extern rtx gen_avx512bw_ussubv32hi3 (rtx, rtx, rtx);
11714extern rtx gen_avx512bw_ussubv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
11715extern rtx gen_avx2_ssaddv16hi3 (rtx, rtx, rtx);
11716extern rtx gen_avx2_ssaddv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
11717extern rtx gen_avx2_usaddv16hi3 (rtx, rtx, rtx);
11718extern rtx gen_avx2_usaddv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
11719extern rtx gen_avx2_sssubv16hi3 (rtx, rtx, rtx);
11720extern rtx gen_avx2_sssubv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
11721extern rtx gen_avx2_ussubv16hi3 (rtx, rtx, rtx);
11722extern rtx gen_avx2_ussubv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
11723extern rtx gen_sse2_ssaddv8hi3 (rtx, rtx, rtx);
11724extern rtx gen_sse2_ssaddv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
11725extern rtx gen_sse2_usaddv8hi3 (rtx, rtx, rtx);
11726extern rtx gen_sse2_usaddv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
11727extern rtx gen_sse2_sssubv8hi3 (rtx, rtx, rtx);
11728extern rtx gen_sse2_sssubv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
11729extern rtx gen_sse2_ussubv8hi3 (rtx, rtx, rtx);
11730extern rtx gen_sse2_ussubv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
11731extern rtx gen_mulv64qi3 (rtx, rtx, rtx);
11732extern rtx gen_mulv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
11733extern rtx gen_mulv32qi3 (rtx, rtx, rtx);
11734extern rtx gen_mulv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
11735extern rtx gen_mulv16qi3 (rtx, rtx, rtx);
11736extern rtx gen_mulv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
11737extern rtx gen_mulv32hi3 (rtx, rtx, rtx);
11738extern rtx gen_mulv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
11739extern rtx gen_mulv16hi3 (rtx, rtx, rtx);
11740extern rtx gen_mulv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
11741extern rtx gen_mulv8hi3 (rtx, rtx, rtx);
11742extern rtx gen_mulv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
11743extern rtx gen_smulv32hi3_highpart (rtx, rtx, rtx);
11744extern rtx gen_smulv32hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
11745extern rtx gen_umulv32hi3_highpart (rtx, rtx, rtx);
11746extern rtx gen_umulv32hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
11747extern rtx gen_smulv16hi3_highpart (rtx, rtx, rtx);
11748extern rtx gen_smulv16hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
11749extern rtx gen_umulv16hi3_highpart (rtx, rtx, rtx);
11750extern rtx gen_umulv16hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
11751extern rtx gen_smulv8hi3_highpart (rtx, rtx, rtx);
11752extern rtx gen_smulv8hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
11753extern rtx gen_umulv8hi3_highpart (rtx, rtx, rtx);
11754extern rtx gen_umulv8hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
11755extern rtx gen_vec_widen_umult_even_v16si (rtx, rtx, rtx);
11756extern rtx gen_vec_widen_umult_even_v16si_mask (rtx, rtx, rtx, rtx, rtx);
11757extern rtx gen_vec_widen_umult_even_v8si (rtx, rtx, rtx);
11758extern rtx gen_vec_widen_umult_even_v8si_mask (rtx, rtx, rtx, rtx, rtx);
11759extern rtx gen_vec_widen_umult_even_v4si (rtx, rtx, rtx);
11760extern rtx gen_vec_widen_umult_even_v4si_mask (rtx, rtx, rtx, rtx, rtx);
11761extern rtx gen_vec_widen_smult_even_v16si (rtx, rtx, rtx);
11762extern rtx gen_vec_widen_smult_even_v16si_mask (rtx, rtx, rtx, rtx, rtx);
11763extern rtx gen_vec_widen_smult_even_v8si (rtx, rtx, rtx);
11764extern rtx gen_vec_widen_smult_even_v8si_mask (rtx, rtx, rtx, rtx, rtx);
11765extern rtx gen_sse4_1_mulv2siv2di3 (rtx, rtx, rtx);
11766extern rtx gen_sse4_1_mulv2siv2di3_mask (rtx, rtx, rtx, rtx, rtx);
11767extern rtx gen_avx2_pmaddwd (rtx, rtx, rtx);
11768extern rtx gen_sse2_pmaddwd (rtx, rtx, rtx);
11769extern rtx gen_mulv16si3 (rtx, rtx, rtx);
11770extern rtx gen_mulv16si3_mask (rtx, rtx, rtx, rtx, rtx);
11771extern rtx gen_mulv8si3 (rtx, rtx, rtx);
11772extern rtx gen_mulv8si3_mask (rtx, rtx, rtx, rtx, rtx);
11773extern rtx gen_mulv4si3 (rtx, rtx, rtx);
11774extern rtx gen_mulv4si3_mask (rtx, rtx, rtx, rtx, rtx);
11775extern rtx gen_mulv8di3 (rtx, rtx, rtx);
11776extern rtx gen_mulv4di3 (rtx, rtx, rtx);
11777extern rtx gen_mulv2di3 (rtx, rtx, rtx);
11778extern rtx gen_vec_widen_smult_hi_v32qi (rtx, rtx, rtx);
11779extern rtx gen_vec_widen_umult_hi_v32qi (rtx, rtx, rtx);
11780extern rtx gen_vec_widen_smult_hi_v16qi (rtx, rtx, rtx);
11781extern rtx gen_vec_widen_umult_hi_v16qi (rtx, rtx, rtx);
11782extern rtx gen_vec_widen_smult_hi_v16hi (rtx, rtx, rtx);
11783extern rtx gen_vec_widen_umult_hi_v16hi (rtx, rtx, rtx);
11784extern rtx gen_vec_widen_smult_hi_v8hi (rtx, rtx, rtx);
11785extern rtx gen_vec_widen_umult_hi_v8hi (rtx, rtx, rtx);
11786extern rtx gen_vec_widen_smult_hi_v8si (rtx, rtx, rtx);
11787extern rtx gen_vec_widen_umult_hi_v8si (rtx, rtx, rtx);
11788extern rtx gen_vec_widen_smult_hi_v4si (rtx, rtx, rtx);
11789extern rtx gen_vec_widen_umult_hi_v4si (rtx, rtx, rtx);
11790extern rtx gen_vec_widen_smult_lo_v32qi (rtx, rtx, rtx);
11791extern rtx gen_vec_widen_umult_lo_v32qi (rtx, rtx, rtx);
11792extern rtx gen_vec_widen_smult_lo_v16qi (rtx, rtx, rtx);
11793extern rtx gen_vec_widen_umult_lo_v16qi (rtx, rtx, rtx);
11794extern rtx gen_vec_widen_smult_lo_v16hi (rtx, rtx, rtx);
11795extern rtx gen_vec_widen_umult_lo_v16hi (rtx, rtx, rtx);
11796extern rtx gen_vec_widen_smult_lo_v8hi (rtx, rtx, rtx);
11797extern rtx gen_vec_widen_umult_lo_v8hi (rtx, rtx, rtx);
11798extern rtx gen_vec_widen_smult_lo_v8si (rtx, rtx, rtx);
11799extern rtx gen_vec_widen_umult_lo_v8si (rtx, rtx, rtx);
11800extern rtx gen_vec_widen_smult_lo_v4si (rtx, rtx, rtx);
11801extern rtx gen_vec_widen_umult_lo_v4si (rtx, rtx, rtx);
11802extern rtx gen_vec_widen_smult_even_v4si (rtx, rtx, rtx);
11803extern rtx gen_vec_widen_smult_odd_v16si (rtx, rtx, rtx);
11804extern rtx gen_vec_widen_umult_odd_v16si (rtx, rtx, rtx);
11805extern rtx gen_vec_widen_smult_odd_v8si (rtx, rtx, rtx);
11806extern rtx gen_vec_widen_umult_odd_v8si (rtx, rtx, rtx);
11807extern rtx gen_vec_widen_smult_odd_v4si (rtx, rtx, rtx);
11808extern rtx gen_vec_widen_umult_odd_v4si (rtx, rtx, rtx);
11809extern rtx gen_sdot_prodv32hi (rtx, rtx, rtx, rtx);
11810extern rtx gen_sdot_prodv16hi (rtx, rtx, rtx, rtx);
11811extern rtx gen_sdot_prodv8hi (rtx, rtx, rtx, rtx);
11812extern rtx gen_sdot_prodv4si (rtx, rtx, rtx, rtx);
11813extern rtx gen_usadv16qi (rtx, rtx, rtx, rtx);
11814extern rtx gen_usadv32qi (rtx, rtx, rtx, rtx);
11815extern rtx gen_vec_shl_v16qi (rtx, rtx, rtx);
11816extern rtx gen_vec_shl_v8hi (rtx, rtx, rtx);
11817extern rtx gen_vec_shl_v4si (rtx, rtx, rtx);
11818extern rtx gen_vec_shl_v2di (rtx, rtx, rtx);
11819extern rtx gen_vec_shr_v16qi (rtx, rtx, rtx);
11820extern rtx gen_vec_shr_v8hi (rtx, rtx, rtx);
11821extern rtx gen_vec_shr_v4si (rtx, rtx, rtx);
11822extern rtx gen_vec_shr_v2di (rtx, rtx, rtx);
11823extern rtx gen_smaxv32qi3 (rtx, rtx, rtx);
11824extern rtx gen_sminv32qi3 (rtx, rtx, rtx);
11825extern rtx gen_umaxv32qi3 (rtx, rtx, rtx);
11826extern rtx gen_uminv32qi3 (rtx, rtx, rtx);
11827extern rtx gen_smaxv16hi3 (rtx, rtx, rtx);
11828extern rtx gen_sminv16hi3 (rtx, rtx, rtx);
11829extern rtx gen_umaxv16hi3 (rtx, rtx, rtx);
11830extern rtx gen_uminv16hi3 (rtx, rtx, rtx);
11831extern rtx gen_smaxv8si3 (rtx, rtx, rtx);
11832extern rtx gen_sminv8si3 (rtx, rtx, rtx);
11833extern rtx gen_umaxv8si3 (rtx, rtx, rtx);
11834extern rtx gen_uminv8si3 (rtx, rtx, rtx);
11835extern rtx gen_smaxv64qi3 (rtx, rtx, rtx);
11836extern rtx gen_sminv64qi3 (rtx, rtx, rtx);
11837extern rtx gen_umaxv64qi3 (rtx, rtx, rtx);
11838extern rtx gen_uminv64qi3 (rtx, rtx, rtx);
11839extern rtx gen_smaxv32hi3 (rtx, rtx, rtx);
11840extern rtx gen_sminv32hi3 (rtx, rtx, rtx);
11841extern rtx gen_umaxv32hi3 (rtx, rtx, rtx);
11842extern rtx gen_uminv32hi3 (rtx, rtx, rtx);
11843extern rtx gen_smaxv16si3 (rtx, rtx, rtx);
11844extern rtx gen_sminv16si3 (rtx, rtx, rtx);
11845extern rtx gen_umaxv16si3 (rtx, rtx, rtx);
11846extern rtx gen_uminv16si3 (rtx, rtx, rtx);
11847extern rtx gen_smaxv16si3_mask (rtx, rtx, rtx, rtx, rtx);
11848extern rtx gen_sminv16si3_mask (rtx, rtx, rtx, rtx, rtx);
11849extern rtx gen_umaxv16si3_mask (rtx, rtx, rtx, rtx, rtx);
11850extern rtx gen_uminv16si3_mask (rtx, rtx, rtx, rtx, rtx);
11851extern rtx gen_smaxv8si3_mask (rtx, rtx, rtx, rtx, rtx);
11852extern rtx gen_sminv8si3_mask (rtx, rtx, rtx, rtx, rtx);
11853extern rtx gen_umaxv8si3_mask (rtx, rtx, rtx, rtx, rtx);
11854extern rtx gen_uminv8si3_mask (rtx, rtx, rtx, rtx, rtx);
11855extern rtx gen_smaxv4si3_mask (rtx, rtx, rtx, rtx, rtx);
11856extern rtx gen_sminv4si3_mask (rtx, rtx, rtx, rtx, rtx);
11857extern rtx gen_umaxv4si3_mask (rtx, rtx, rtx, rtx, rtx);
11858extern rtx gen_uminv4si3_mask (rtx, rtx, rtx, rtx, rtx);
11859extern rtx gen_smaxv8di3_mask (rtx, rtx, rtx, rtx, rtx);
11860extern rtx gen_sminv8di3_mask (rtx, rtx, rtx, rtx, rtx);
11861extern rtx gen_umaxv8di3_mask (rtx, rtx, rtx, rtx, rtx);
11862extern rtx gen_uminv8di3_mask (rtx, rtx, rtx, rtx, rtx);
11863extern rtx gen_smaxv4di3_mask (rtx, rtx, rtx, rtx, rtx);
11864extern rtx gen_sminv4di3_mask (rtx, rtx, rtx, rtx, rtx);
11865extern rtx gen_umaxv4di3_mask (rtx, rtx, rtx, rtx, rtx);
11866extern rtx gen_uminv4di3_mask (rtx, rtx, rtx, rtx, rtx);
11867extern rtx gen_smaxv2di3_mask (rtx, rtx, rtx, rtx, rtx);
11868extern rtx gen_sminv2di3_mask (rtx, rtx, rtx, rtx, rtx);
11869extern rtx gen_umaxv2di3_mask (rtx, rtx, rtx, rtx, rtx);
11870extern rtx gen_uminv2di3_mask (rtx, rtx, rtx, rtx, rtx);
11871extern rtx gen_smaxv8di3 (rtx, rtx, rtx);
11872extern rtx gen_sminv8di3 (rtx, rtx, rtx);
11873extern rtx gen_umaxv8di3 (rtx, rtx, rtx);
11874extern rtx gen_uminv8di3 (rtx, rtx, rtx);
11875extern rtx gen_smaxv4di3 (rtx, rtx, rtx);
11876extern rtx gen_sminv4di3 (rtx, rtx, rtx);
11877extern rtx gen_umaxv4di3 (rtx, rtx, rtx);
11878extern rtx gen_uminv4di3 (rtx, rtx, rtx);
11879extern rtx gen_smaxv2di3 (rtx, rtx, rtx);
11880extern rtx gen_sminv2di3 (rtx, rtx, rtx);
11881extern rtx gen_umaxv2di3 (rtx, rtx, rtx);
11882extern rtx gen_uminv2di3 (rtx, rtx, rtx);
11883extern rtx gen_smaxv16qi3 (rtx, rtx, rtx);
11884extern rtx gen_sminv16qi3 (rtx, rtx, rtx);
11885extern rtx gen_smaxv8hi3 (rtx, rtx, rtx);
11886extern rtx gen_sminv8hi3 (rtx, rtx, rtx);
11887extern rtx gen_smaxv4si3 (rtx, rtx, rtx);
11888extern rtx gen_sminv4si3 (rtx, rtx, rtx);
11889extern rtx gen_umaxv16qi3 (rtx, rtx, rtx);
11890extern rtx gen_uminv16qi3 (rtx, rtx, rtx);
11891extern rtx gen_umaxv8hi3 (rtx, rtx, rtx);
11892extern rtx gen_uminv8hi3 (rtx, rtx, rtx);
11893extern rtx gen_umaxv4si3 (rtx, rtx, rtx);
11894extern rtx gen_uminv4si3 (rtx, rtx, rtx);
11895extern rtx gen_avx2_eqv32qi3 (rtx, rtx, rtx);
11896extern rtx gen_avx2_eqv16hi3 (rtx, rtx, rtx);
11897extern rtx gen_avx2_eqv8si3 (rtx, rtx, rtx);
11898extern rtx gen_avx2_eqv4di3 (rtx, rtx, rtx);
11899extern rtx gen_avx512bw_eqv64qi3 (rtx, rtx, rtx);
11900extern rtx gen_avx512bw_eqv64qi3_mask (rtx, rtx, rtx, rtx);
11901extern rtx gen_avx512vl_eqv16qi3 (rtx, rtx, rtx);
11902extern rtx gen_avx512vl_eqv16qi3_mask (rtx, rtx, rtx, rtx);
11903extern rtx gen_avx512vl_eqv32qi3 (rtx, rtx, rtx);
11904extern rtx gen_avx512vl_eqv32qi3_mask (rtx, rtx, rtx, rtx);
11905extern rtx gen_avx512bw_eqv32hi3 (rtx, rtx, rtx);
11906extern rtx gen_avx512bw_eqv32hi3_mask (rtx, rtx, rtx, rtx);
11907extern rtx gen_avx512vl_eqv16hi3 (rtx, rtx, rtx);
11908extern rtx gen_avx512vl_eqv16hi3_mask (rtx, rtx, rtx, rtx);
11909extern rtx gen_avx512vl_eqv8hi3 (rtx, rtx, rtx);
11910extern rtx gen_avx512vl_eqv8hi3_mask (rtx, rtx, rtx, rtx);
11911extern rtx gen_avx512f_eqv16si3 (rtx, rtx, rtx);
11912extern rtx gen_avx512f_eqv16si3_mask (rtx, rtx, rtx, rtx);
11913extern rtx gen_avx512vl_eqv8si3 (rtx, rtx, rtx);
11914extern rtx gen_avx512vl_eqv8si3_mask (rtx, rtx, rtx, rtx);
11915extern rtx gen_avx512vl_eqv4si3 (rtx, rtx, rtx);
11916extern rtx gen_avx512vl_eqv4si3_mask (rtx, rtx, rtx, rtx);
11917extern rtx gen_avx512f_eqv8di3 (rtx, rtx, rtx);
11918extern rtx gen_avx512f_eqv8di3_mask (rtx, rtx, rtx, rtx);
11919extern rtx gen_avx512vl_eqv4di3 (rtx, rtx, rtx);
11920extern rtx gen_avx512vl_eqv4di3_mask (rtx, rtx, rtx, rtx);
11921extern rtx gen_avx512vl_eqv2di3 (rtx, rtx, rtx);
11922extern rtx gen_avx512vl_eqv2di3_mask (rtx, rtx, rtx, rtx);
11923extern rtx gen_sse2_eqv16qi3 (rtx, rtx, rtx);
11924extern rtx gen_sse2_eqv8hi3 (rtx, rtx, rtx);
11925extern rtx gen_sse2_eqv4si3 (rtx, rtx, rtx);
11926extern rtx gen_sse4_1_eqv2di3 (rtx, rtx, rtx);
11927extern rtx gen_vcondv64qiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
11928extern rtx gen_vcondv32hiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
11929extern rtx gen_vcondv16siv16si (rtx, rtx, rtx, rtx, rtx, rtx);
11930extern rtx gen_vcondv8div16si (rtx, rtx, rtx, rtx, rtx, rtx);
11931extern rtx gen_vcondv16sfv16si (rtx, rtx, rtx, rtx, rtx, rtx);
11932extern rtx gen_vcondv8dfv16si (rtx, rtx, rtx, rtx, rtx, rtx);
11933extern rtx gen_vcondv64qiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
11934extern rtx gen_vcondv32hiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
11935extern rtx gen_vcondv16siv8di (rtx, rtx, rtx, rtx, rtx, rtx);
11936extern rtx gen_vcondv8div8di (rtx, rtx, rtx, rtx, rtx, rtx);
11937extern rtx gen_vcondv16sfv8di (rtx, rtx, rtx, rtx, rtx, rtx);
11938extern rtx gen_vcondv8dfv8di (rtx, rtx, rtx, rtx, rtx, rtx);
11939extern rtx gen_vcondv64qiv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
11940extern rtx gen_vcondv32hiv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
11941extern rtx gen_vcondv16siv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
11942extern rtx gen_vcondv8div32hi (rtx, rtx, rtx, rtx, rtx, rtx);
11943extern rtx gen_vcondv16sfv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
11944extern rtx gen_vcondv8dfv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
11945extern rtx gen_vcondv64qiv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
11946extern rtx gen_vcondv32hiv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
11947extern rtx gen_vcondv16siv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
11948extern rtx gen_vcondv8div64qi (rtx, rtx, rtx, rtx, rtx, rtx);
11949extern rtx gen_vcondv16sfv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
11950extern rtx gen_vcondv8dfv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
11951extern rtx gen_vcondv32qiv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
11952extern rtx gen_vcondv32qiv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
11953extern rtx gen_vcondv32qiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
11954extern rtx gen_vcondv32qiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
11955extern rtx gen_vcondv16hiv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
11956extern rtx gen_vcondv16hiv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
11957extern rtx gen_vcondv16hiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
11958extern rtx gen_vcondv16hiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
11959extern rtx gen_vcondv8siv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
11960extern rtx gen_vcondv8siv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
11961extern rtx gen_vcondv8siv8si (rtx, rtx, rtx, rtx, rtx, rtx);
11962extern rtx gen_vcondv8siv4di (rtx, rtx, rtx, rtx, rtx, rtx);
11963extern rtx gen_vcondv4div32qi (rtx, rtx, rtx, rtx, rtx, rtx);
11964extern rtx gen_vcondv4div16hi (rtx, rtx, rtx, rtx, rtx, rtx);
11965extern rtx gen_vcondv4div8si (rtx, rtx, rtx, rtx, rtx, rtx);
11966extern rtx gen_vcondv4div4di (rtx, rtx, rtx, rtx, rtx, rtx);
11967extern rtx gen_vcondv8sfv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
11968extern rtx gen_vcondv8sfv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
11969extern rtx gen_vcondv8sfv8si (rtx, rtx, rtx, rtx, rtx, rtx);
11970extern rtx gen_vcondv8sfv4di (rtx, rtx, rtx, rtx, rtx, rtx);
11971extern rtx gen_vcondv4dfv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
11972extern rtx gen_vcondv4dfv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
11973extern rtx gen_vcondv4dfv8si (rtx, rtx, rtx, rtx, rtx, rtx);
11974extern rtx gen_vcondv4dfv4di (rtx, rtx, rtx, rtx, rtx, rtx);
11975extern rtx gen_vcondv16qiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
11976extern rtx gen_vcondv8hiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
11977extern rtx gen_vcondv4siv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
11978extern rtx gen_vcondv2div16qi (rtx, rtx, rtx, rtx, rtx, rtx);
11979extern rtx gen_vcondv4sfv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
11980extern rtx gen_vcondv2dfv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
11981extern rtx gen_vcondv16qiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
11982extern rtx gen_vcondv8hiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
11983extern rtx gen_vcondv4siv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
11984extern rtx gen_vcondv2div8hi (rtx, rtx, rtx, rtx, rtx, rtx);
11985extern rtx gen_vcondv4sfv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
11986extern rtx gen_vcondv2dfv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
11987extern rtx gen_vcondv16qiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
11988extern rtx gen_vcondv8hiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
11989extern rtx gen_vcondv4siv4si (rtx, rtx, rtx, rtx, rtx, rtx);
11990extern rtx gen_vcondv2div4si (rtx, rtx, rtx, rtx, rtx, rtx);
11991extern rtx gen_vcondv4sfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
11992extern rtx gen_vcondv2dfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
11993extern rtx gen_vcondv2div2di (rtx, rtx, rtx, rtx, rtx, rtx);
11994extern rtx gen_vcondv2dfv2di (rtx, rtx, rtx, rtx, rtx, rtx);
11995extern rtx gen_vconduv64qiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
11996extern rtx gen_vconduv32hiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
11997extern rtx gen_vconduv16siv16si (rtx, rtx, rtx, rtx, rtx, rtx);
11998extern rtx gen_vconduv8div16si (rtx, rtx, rtx, rtx, rtx, rtx);
11999extern rtx gen_vconduv16sfv16si (rtx, rtx, rtx, rtx, rtx, rtx);
12000extern rtx gen_vconduv8dfv16si (rtx, rtx, rtx, rtx, rtx, rtx);
12001extern rtx gen_vconduv64qiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
12002extern rtx gen_vconduv32hiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
12003extern rtx gen_vconduv16siv8di (rtx, rtx, rtx, rtx, rtx, rtx);
12004extern rtx gen_vconduv8div8di (rtx, rtx, rtx, rtx, rtx, rtx);
12005extern rtx gen_vconduv16sfv8di (rtx, rtx, rtx, rtx, rtx, rtx);
12006extern rtx gen_vconduv8dfv8di (rtx, rtx, rtx, rtx, rtx, rtx);
12007extern rtx gen_vconduv64qiv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
12008extern rtx gen_vconduv32hiv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
12009extern rtx gen_vconduv16siv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
12010extern rtx gen_vconduv8div32hi (rtx, rtx, rtx, rtx, rtx, rtx);
12011extern rtx gen_vconduv16sfv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
12012extern rtx gen_vconduv8dfv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
12013extern rtx gen_vconduv64qiv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
12014extern rtx gen_vconduv32hiv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
12015extern rtx gen_vconduv16siv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
12016extern rtx gen_vconduv8div64qi (rtx, rtx, rtx, rtx, rtx, rtx);
12017extern rtx gen_vconduv16sfv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
12018extern rtx gen_vconduv8dfv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
12019extern rtx gen_vconduv32qiv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
12020extern rtx gen_vconduv32qiv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
12021extern rtx gen_vconduv32qiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
12022extern rtx gen_vconduv32qiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
12023extern rtx gen_vconduv16hiv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
12024extern rtx gen_vconduv16hiv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
12025extern rtx gen_vconduv16hiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
12026extern rtx gen_vconduv16hiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
12027extern rtx gen_vconduv8siv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
12028extern rtx gen_vconduv8siv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
12029extern rtx gen_vconduv8siv8si (rtx, rtx, rtx, rtx, rtx, rtx);
12030extern rtx gen_vconduv8siv4di (rtx, rtx, rtx, rtx, rtx, rtx);
12031extern rtx gen_vconduv4div32qi (rtx, rtx, rtx, rtx, rtx, rtx);
12032extern rtx gen_vconduv4div16hi (rtx, rtx, rtx, rtx, rtx, rtx);
12033extern rtx gen_vconduv4div8si (rtx, rtx, rtx, rtx, rtx, rtx);
12034extern rtx gen_vconduv4div4di (rtx, rtx, rtx, rtx, rtx, rtx);
12035extern rtx gen_vconduv8sfv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
12036extern rtx gen_vconduv8sfv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
12037extern rtx gen_vconduv8sfv8si (rtx, rtx, rtx, rtx, rtx, rtx);
12038extern rtx gen_vconduv8sfv4di (rtx, rtx, rtx, rtx, rtx, rtx);
12039extern rtx gen_vconduv4dfv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
12040extern rtx gen_vconduv4dfv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
12041extern rtx gen_vconduv4dfv8si (rtx, rtx, rtx, rtx, rtx, rtx);
12042extern rtx gen_vconduv4dfv4di (rtx, rtx, rtx, rtx, rtx, rtx);
12043extern rtx gen_vconduv16qiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
12044extern rtx gen_vconduv8hiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
12045extern rtx gen_vconduv4siv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
12046extern rtx gen_vconduv2div16qi (rtx, rtx, rtx, rtx, rtx, rtx);
12047extern rtx gen_vconduv4sfv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
12048extern rtx gen_vconduv2dfv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
12049extern rtx gen_vconduv16qiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
12050extern rtx gen_vconduv8hiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
12051extern rtx gen_vconduv4siv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
12052extern rtx gen_vconduv2div8hi (rtx, rtx, rtx, rtx, rtx, rtx);
12053extern rtx gen_vconduv4sfv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
12054extern rtx gen_vconduv2dfv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
12055extern rtx gen_vconduv16qiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
12056extern rtx gen_vconduv8hiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
12057extern rtx gen_vconduv4siv4si (rtx, rtx, rtx, rtx, rtx, rtx);
12058extern rtx gen_vconduv2div4si (rtx, rtx, rtx, rtx, rtx, rtx);
12059extern rtx gen_vconduv4sfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
12060extern rtx gen_vconduv2dfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
12061extern rtx gen_vconduv2div2di (rtx, rtx, rtx, rtx, rtx, rtx);
12062extern rtx gen_vconduv2dfv2di (rtx, rtx, rtx, rtx, rtx, rtx);
12063extern rtx gen_vcondeqv2div2di (rtx, rtx, rtx, rtx, rtx, rtx);
12064extern rtx gen_vcondeqv2dfv2di (rtx, rtx, rtx, rtx, rtx, rtx);
12065extern rtx gen_vec_permv16qi (rtx, rtx, rtx, rtx);
12066extern rtx gen_vec_permv8hi (rtx, rtx, rtx, rtx);
12067extern rtx gen_vec_permv4si (rtx, rtx, rtx, rtx);
12068extern rtx gen_vec_permv2di (rtx, rtx, rtx, rtx);
12069extern rtx gen_vec_permv4sf (rtx, rtx, rtx, rtx);
12070extern rtx gen_vec_permv2df (rtx, rtx, rtx, rtx);
12071extern rtx gen_vec_permv32qi (rtx, rtx, rtx, rtx);
12072extern rtx gen_vec_permv16hi (rtx, rtx, rtx, rtx);
12073extern rtx gen_vec_permv8si (rtx, rtx, rtx, rtx);
12074extern rtx gen_vec_permv4di (rtx, rtx, rtx, rtx);
12075extern rtx gen_vec_permv8sf (rtx, rtx, rtx, rtx);
12076extern rtx gen_vec_permv4df (rtx, rtx, rtx, rtx);
12077extern rtx gen_vec_permv16sf (rtx, rtx, rtx, rtx);
12078extern rtx gen_vec_permv8df (rtx, rtx, rtx, rtx);
12079extern rtx gen_vec_permv16si (rtx, rtx, rtx, rtx);
12080extern rtx gen_vec_permv8di (rtx, rtx, rtx, rtx);
12081extern rtx gen_vec_permv32hi (rtx, rtx, rtx, rtx);
12082extern rtx gen_vec_permv64qi (rtx, rtx, rtx, rtx);
12083extern rtx gen_vec_perm_constv4sf (rtx, rtx, rtx, rtx);
12084extern rtx gen_vec_perm_constv4si (rtx, rtx, rtx, rtx);
12085extern rtx gen_vec_perm_constv2df (rtx, rtx, rtx, rtx);
12086extern rtx gen_vec_perm_constv2di (rtx, rtx, rtx, rtx);
12087extern rtx gen_vec_perm_constv16qi (rtx, rtx, rtx, rtx);
12088extern rtx gen_vec_perm_constv8hi (rtx, rtx, rtx, rtx);
12089extern rtx gen_vec_perm_constv8sf (rtx, rtx, rtx, rtx);
12090extern rtx gen_vec_perm_constv4df (rtx, rtx, rtx, rtx);
12091extern rtx gen_vec_perm_constv8si (rtx, rtx, rtx, rtx);
12092extern rtx gen_vec_perm_constv4di (rtx, rtx, rtx, rtx);
12093extern rtx gen_vec_perm_constv32qi (rtx, rtx, rtx, rtx);
12094extern rtx gen_vec_perm_constv16hi (rtx, rtx, rtx, rtx);
12095extern rtx gen_vec_perm_constv16si (rtx, rtx, rtx, rtx);
12096extern rtx gen_vec_perm_constv8di (rtx, rtx, rtx, rtx);
12097extern rtx gen_vec_perm_constv16sf (rtx, rtx, rtx, rtx);
12098extern rtx gen_vec_perm_constv8df (rtx, rtx, rtx, rtx);
12099extern rtx gen_vec_perm_constv32hi (rtx, rtx, rtx, rtx);
12100extern rtx gen_vec_perm_constv64qi (rtx, rtx, rtx, rtx);
12101extern rtx gen_one_cmplv16si2 (rtx, rtx);
12102extern rtx gen_one_cmplv8di2 (rtx, rtx);
12103extern rtx gen_one_cmplv64qi2 (rtx, rtx);
12104extern rtx gen_one_cmplv32qi2 (rtx, rtx);
12105extern rtx gen_one_cmplv16qi2 (rtx, rtx);
12106extern rtx gen_one_cmplv32hi2 (rtx, rtx);
12107extern rtx gen_one_cmplv16hi2 (rtx, rtx);
12108extern rtx gen_one_cmplv8hi2 (rtx, rtx);
12109extern rtx gen_one_cmplv8si2 (rtx, rtx);
12110extern rtx gen_one_cmplv4si2 (rtx, rtx);
12111extern rtx gen_one_cmplv4di2 (rtx, rtx);
12112extern rtx gen_one_cmplv2di2 (rtx, rtx);
12113extern rtx gen_avx512bw_andnotv64qi3 (rtx, rtx, rtx);
12114extern rtx gen_avx2_andnotv32qi3 (rtx, rtx, rtx);
12115extern rtx gen_sse2_andnotv16qi3 (rtx, rtx, rtx);
12116extern rtx gen_avx512bw_andnotv32hi3 (rtx, rtx, rtx);
12117extern rtx gen_avx2_andnotv16hi3 (rtx, rtx, rtx);
12118extern rtx gen_sse2_andnotv8hi3 (rtx, rtx, rtx);
12119extern rtx gen_avx512f_andnotv16si3 (rtx, rtx, rtx);
12120extern rtx gen_avx2_andnotv8si3 (rtx, rtx, rtx);
12121extern rtx gen_sse2_andnotv4si3 (rtx, rtx, rtx);
12122extern rtx gen_avx512f_andnotv8di3 (rtx, rtx, rtx);
12123extern rtx gen_avx2_andnotv4di3 (rtx, rtx, rtx);
12124extern rtx gen_sse2_andnotv2di3 (rtx, rtx, rtx);
12125extern rtx gen_avx512f_andnotv16si3_mask (rtx, rtx, rtx, rtx, rtx);
12126extern rtx gen_avx2_andnotv8si3_mask (rtx, rtx, rtx, rtx, rtx);
12127extern rtx gen_sse2_andnotv4si3_mask (rtx, rtx, rtx, rtx, rtx);
12128extern rtx gen_avx512f_andnotv8di3_mask (rtx, rtx, rtx, rtx, rtx);
12129extern rtx gen_avx2_andnotv4di3_mask (rtx, rtx, rtx, rtx, rtx);
12130extern rtx gen_sse2_andnotv2di3_mask (rtx, rtx, rtx, rtx, rtx);
12131extern rtx gen_avx512bw_andnotv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
12132extern rtx gen_sse2_andnotv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
12133extern rtx gen_avx2_andnotv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
12134extern rtx gen_avx512bw_andnotv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
12135extern rtx gen_avx2_andnotv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
12136extern rtx gen_sse2_andnotv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
12137extern rtx gen_andv16si3 (rtx, rtx, rtx);
12138extern rtx gen_iorv16si3 (rtx, rtx, rtx);
12139extern rtx gen_xorv16si3 (rtx, rtx, rtx);
12140extern rtx gen_andv8di3 (rtx, rtx, rtx);
12141extern rtx gen_iorv8di3 (rtx, rtx, rtx);
12142extern rtx gen_xorv8di3 (rtx, rtx, rtx);
12143extern rtx gen_andv64qi3 (rtx, rtx, rtx);
12144extern rtx gen_iorv64qi3 (rtx, rtx, rtx);
12145extern rtx gen_xorv64qi3 (rtx, rtx, rtx);
12146extern rtx gen_andv32qi3 (rtx, rtx, rtx);
12147extern rtx gen_iorv32qi3 (rtx, rtx, rtx);
12148extern rtx gen_xorv32qi3 (rtx, rtx, rtx);
12149extern rtx gen_andv16qi3 (rtx, rtx, rtx);
12150extern rtx gen_iorv16qi3 (rtx, rtx, rtx);
12151extern rtx gen_xorv16qi3 (rtx, rtx, rtx);
12152extern rtx gen_andv32hi3 (rtx, rtx, rtx);
12153extern rtx gen_iorv32hi3 (rtx, rtx, rtx);
12154extern rtx gen_xorv32hi3 (rtx, rtx, rtx);
12155extern rtx gen_andv16hi3 (rtx, rtx, rtx);
12156extern rtx gen_iorv16hi3 (rtx, rtx, rtx);
12157extern rtx gen_xorv16hi3 (rtx, rtx, rtx);
12158extern rtx gen_andv8hi3 (rtx, rtx, rtx);
12159extern rtx gen_iorv8hi3 (rtx, rtx, rtx);
12160extern rtx gen_xorv8hi3 (rtx, rtx, rtx);
12161extern rtx gen_andv8si3 (rtx, rtx, rtx);
12162extern rtx gen_iorv8si3 (rtx, rtx, rtx);
12163extern rtx gen_xorv8si3 (rtx, rtx, rtx);
12164extern rtx gen_andv4si3 (rtx, rtx, rtx);
12165extern rtx gen_iorv4si3 (rtx, rtx, rtx);
12166extern rtx gen_xorv4si3 (rtx, rtx, rtx);
12167extern rtx gen_andv4di3 (rtx, rtx, rtx);
12168extern rtx gen_iorv4di3 (rtx, rtx, rtx);
12169extern rtx gen_xorv4di3 (rtx, rtx, rtx);
12170extern rtx gen_andv2di3 (rtx, rtx, rtx);
12171extern rtx gen_iorv2di3 (rtx, rtx, rtx);
12172extern rtx gen_xorv2di3 (rtx, rtx, rtx);
12173extern rtx gen_vec_pack_trunc_v32hi (rtx, rtx, rtx);
12174extern rtx gen_vec_pack_trunc_v16hi (rtx, rtx, rtx);
12175extern rtx gen_vec_pack_trunc_v8hi (rtx, rtx, rtx);
12176extern rtx gen_vec_pack_trunc_v16si (rtx, rtx, rtx);
12177extern rtx gen_vec_pack_trunc_v8si (rtx, rtx, rtx);
12178extern rtx gen_vec_pack_trunc_v4si (rtx, rtx, rtx);
12179extern rtx gen_vec_pack_trunc_v8di (rtx, rtx, rtx);
12180extern rtx gen_vec_pack_trunc_v4di (rtx, rtx, rtx);
12181extern rtx gen_vec_pack_trunc_v2di (rtx, rtx, rtx);
12182extern rtx gen_vec_pack_trunc_qi (rtx, rtx, rtx);
12183extern rtx gen_vec_pack_trunc_hi (rtx, rtx, rtx);
12184extern rtx gen_vec_pack_trunc_si (rtx, rtx, rtx);
12185extern rtx gen_vec_interleave_highv32qi (rtx, rtx, rtx);
12186extern rtx gen_vec_interleave_highv16hi (rtx, rtx, rtx);
12187extern rtx gen_vec_interleave_highv8si (rtx, rtx, rtx);
12188extern rtx gen_vec_interleave_highv4di (rtx, rtx, rtx);
12189extern rtx gen_vec_interleave_lowv32qi (rtx, rtx, rtx);
12190extern rtx gen_vec_interleave_lowv16hi (rtx, rtx, rtx);
12191extern rtx gen_vec_interleave_lowv8si (rtx, rtx, rtx);
12192extern rtx gen_vec_interleave_lowv4di (rtx, rtx, rtx);
12193extern rtx gen_avx512dq_vinsertf64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12194extern rtx gen_avx512dq_vinserti64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12195extern rtx gen_avx512f_vinsertf32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12196extern rtx gen_avx512f_vinserti32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12197extern rtx gen_avx512dq_vinsertf32x8_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12198extern rtx gen_avx512dq_vinserti32x8_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12199extern rtx gen_avx512f_vinsertf64x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12200extern rtx gen_avx512f_vinserti64x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12201extern rtx gen_avx512dq_shuf_i64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12202extern rtx gen_avx512dq_shuf_f64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12203extern rtx gen_avx512f_shuf_f64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12204extern rtx gen_avx512f_shuf_i64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12205extern rtx gen_avx512vl_shuf_i32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12206extern rtx gen_avx512vl_shuf_f32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12207extern rtx gen_avx512f_shuf_f32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12208extern rtx gen_avx512f_shuf_i32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
12209extern rtx gen_avx512f_pshufdv3_mask (rtx, rtx, rtx, rtx, rtx);
12210extern rtx gen_avx512vl_pshufdv3_mask (rtx, rtx, rtx, rtx, rtx);
12211extern rtx gen_avx2_pshufdv3 (rtx, rtx, rtx);
12212extern rtx gen_avx512vl_pshufd_mask (rtx, rtx, rtx, rtx, rtx);
12213extern rtx gen_sse2_pshufd (rtx, rtx, rtx);
12214extern rtx gen_avx512vl_pshuflwv3_mask (rtx, rtx, rtx, rtx, rtx);
12215extern rtx gen_avx2_pshuflwv3 (rtx, rtx, rtx);
12216extern rtx gen_avx512vl_pshuflw_mask (rtx, rtx, rtx, rtx, rtx);
12217extern rtx gen_sse2_pshuflw (rtx, rtx, rtx);
12218extern rtx gen_avx2_pshufhwv3 (rtx, rtx, rtx);
12219extern rtx gen_avx512vl_pshufhwv3_mask (rtx, rtx, rtx, rtx, rtx);
12220extern rtx gen_avx512vl_pshufhw_mask (rtx, rtx, rtx, rtx, rtx);
12221extern rtx gen_sse2_pshufhw (rtx, rtx, rtx);
12222extern rtx gen_sse2_loadd (rtx, rtx);
12223extern rtx gen_vec_unpacks_lo_v64qi (rtx, rtx);
12224extern rtx gen_vec_unpacks_lo_v32qi (rtx, rtx);
12225extern rtx gen_vec_unpacks_lo_v16qi (rtx, rtx);
12226extern rtx gen_vec_unpacks_lo_v32hi (rtx, rtx);
12227extern rtx gen_vec_unpacks_lo_v16hi (rtx, rtx);
12228extern rtx gen_vec_unpacks_lo_v8hi (rtx, rtx);
12229extern rtx gen_vec_unpacks_lo_v16si (rtx, rtx);
12230extern rtx gen_vec_unpacks_lo_v8si (rtx, rtx);
12231extern rtx gen_vec_unpacks_lo_v4si (rtx, rtx);
12232extern rtx gen_vec_unpacks_hi_v64qi (rtx, rtx);
12233extern rtx gen_vec_unpacks_hi_v32qi (rtx, rtx);
12234extern rtx gen_vec_unpacks_hi_v16qi (rtx, rtx);
12235extern rtx gen_vec_unpacks_hi_v32hi (rtx, rtx);
12236extern rtx gen_vec_unpacks_hi_v16hi (rtx, rtx);
12237extern rtx gen_vec_unpacks_hi_v8hi (rtx, rtx);
12238extern rtx gen_vec_unpacks_hi_v16si (rtx, rtx);
12239extern rtx gen_vec_unpacks_hi_v8si (rtx, rtx);
12240extern rtx gen_vec_unpacks_hi_v4si (rtx, rtx);
12241extern rtx gen_vec_unpacku_lo_v64qi (rtx, rtx);
12242extern rtx gen_vec_unpacku_lo_v32qi (rtx, rtx);
12243extern rtx gen_vec_unpacku_lo_v16qi (rtx, rtx);
12244extern rtx gen_vec_unpacku_lo_v32hi (rtx, rtx);
12245extern rtx gen_vec_unpacku_lo_v16hi (rtx, rtx);
12246extern rtx gen_vec_unpacku_lo_v8hi (rtx, rtx);
12247extern rtx gen_vec_unpacku_lo_v16si (rtx, rtx);
12248extern rtx gen_vec_unpacku_lo_v8si (rtx, rtx);
12249extern rtx gen_vec_unpacku_lo_v4si (rtx, rtx);
12250extern rtx gen_vec_unpacks_lo_hi (rtx, rtx);
12251extern rtx gen_vec_unpacks_lo_si (rtx, rtx);
12252extern rtx gen_vec_unpacks_lo_di (rtx, rtx);
12253extern rtx gen_vec_unpacku_hi_v64qi (rtx, rtx);
12254extern rtx gen_vec_unpacku_hi_v32qi (rtx, rtx);
12255extern rtx gen_vec_unpacku_hi_v16qi (rtx, rtx);
12256extern rtx gen_vec_unpacku_hi_v32hi (rtx, rtx);
12257extern rtx gen_vec_unpacku_hi_v16hi (rtx, rtx);
12258extern rtx gen_vec_unpacku_hi_v8hi (rtx, rtx);
12259extern rtx gen_vec_unpacku_hi_v16si (rtx, rtx);
12260extern rtx gen_vec_unpacku_hi_v8si (rtx, rtx);
12261extern rtx gen_vec_unpacku_hi_v4si (rtx, rtx);
12262extern rtx gen_vec_unpacks_hi_hi (rtx, rtx);
12263extern rtx gen_vec_unpacks_hi_si (rtx, rtx);
12264extern rtx gen_vec_unpacks_hi_di (rtx, rtx);
12265extern rtx gen_avx512bw_uavgv64qi3 (rtx, rtx, rtx);
12266extern rtx gen_avx512bw_uavgv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
12267extern rtx gen_avx2_uavgv32qi3 (rtx, rtx, rtx);
12268extern rtx gen_avx2_uavgv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
12269extern rtx gen_sse2_uavgv16qi3 (rtx, rtx, rtx);
12270extern rtx gen_sse2_uavgv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
12271extern rtx gen_avx512bw_uavgv32hi3 (rtx, rtx, rtx);
12272extern rtx gen_avx512bw_uavgv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
12273extern rtx gen_avx2_uavgv16hi3 (rtx, rtx, rtx);
12274extern rtx gen_avx2_uavgv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
12275extern rtx gen_sse2_uavgv8hi3 (rtx, rtx, rtx);
12276extern rtx gen_sse2_uavgv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
12277extern rtx gen_sse2_maskmovdqu (rtx, rtx, rtx);
12278extern rtx gen_ssse3_pmulhrswv4hi3_mask (rtx, rtx, rtx, rtx, rtx);
12279extern rtx gen_ssse3_pmulhrswv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
12280extern rtx gen_avx2_pmulhrswv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
12281extern rtx gen_ssse3_pmulhrswv4hi3 (rtx, rtx, rtx);
12282extern rtx gen_ssse3_pmulhrswv8hi3 (rtx, rtx, rtx);
12283extern rtx gen_avx2_pmulhrswv16hi3 (rtx, rtx, rtx);
12284extern rtx gen_absv64qi2 (rtx, rtx);
12285extern rtx gen_absv32qi2 (rtx, rtx);
12286extern rtx gen_absv16qi2 (rtx, rtx);
12287extern rtx gen_absv32hi2 (rtx, rtx);
12288extern rtx gen_absv16hi2 (rtx, rtx);
12289extern rtx gen_absv8hi2 (rtx, rtx);
12290extern rtx gen_absv16si2 (rtx, rtx);
12291extern rtx gen_absv8si2 (rtx, rtx);
12292extern rtx gen_absv4si2 (rtx, rtx);
12293extern rtx gen_absv8di2 (rtx, rtx);
12294extern rtx gen_absv4di2 (rtx, rtx);
12295extern rtx gen_absv2di2 (rtx, rtx);
12296extern rtx gen_avx2_pblendw (rtx, rtx, rtx, rtx);
12297extern rtx gen_avx_roundps_sfix256 (rtx, rtx, rtx);
12298extern rtx gen_sse4_1_roundps_sfix (rtx, rtx, rtx);
12299extern rtx gen_avx512f_roundps512 (rtx, rtx, rtx);
12300extern rtx gen_avx512f_roundpd512 (rtx, rtx, rtx);
12301extern rtx gen_avx512f_roundps512_sfix (rtx, rtx, rtx);
12302extern rtx gen_avx512f_roundpd_vec_pack_sfix512 (rtx, rtx, rtx, rtx);
12303extern rtx gen_avx_roundpd_vec_pack_sfix256 (rtx, rtx, rtx, rtx);
12304extern rtx gen_sse4_1_roundpd_vec_pack_sfix (rtx, rtx, rtx, rtx);
12305extern rtx gen_roundv16sf2 (rtx, rtx);
12306extern rtx gen_roundv8sf2 (rtx, rtx);
12307extern rtx gen_roundv4sf2 (rtx, rtx);
12308extern rtx gen_roundv8df2 (rtx, rtx);
12309extern rtx gen_roundv4df2 (rtx, rtx);
12310extern rtx gen_roundv2df2 (rtx, rtx);
12311extern rtx gen_roundv16sf2_sfix (rtx, rtx);
12312extern rtx gen_roundv8sf2_sfix (rtx, rtx);
12313extern rtx gen_roundv4sf2_sfix (rtx, rtx);
12314extern rtx gen_roundv8df2_vec_pack_sfix (rtx, rtx, rtx);
12315extern rtx gen_roundv4df2_vec_pack_sfix (rtx, rtx, rtx);
12316extern rtx gen_roundv2df2_vec_pack_sfix (rtx, rtx, rtx);
12317extern rtx gen_avx512pf_gatherpfv16sisf (rtx, rtx, rtx, rtx, rtx);
12318extern rtx gen_avx512pf_gatherpfv8disf (rtx, rtx, rtx, rtx, rtx);
12319extern rtx gen_avx512pf_gatherpfv8sidf (rtx, rtx, rtx, rtx, rtx);
12320extern rtx gen_avx512pf_gatherpfv8didf (rtx, rtx, rtx, rtx, rtx);
12321extern rtx gen_avx512pf_scatterpfv16sisf (rtx, rtx, rtx, rtx, rtx);
12322extern rtx gen_avx512pf_scatterpfv8disf (rtx, rtx, rtx, rtx, rtx);
12323extern rtx gen_avx512pf_scatterpfv8sidf (rtx, rtx, rtx, rtx, rtx);
12324extern rtx gen_avx512pf_scatterpfv8didf (rtx, rtx, rtx, rtx, rtx);
12325extern rtx gen_rotlv16qi3 (rtx, rtx, rtx);
12326extern rtx gen_rotlv8hi3 (rtx, rtx, rtx);
12327extern rtx gen_rotlv4si3 (rtx, rtx, rtx);
12328extern rtx gen_rotlv2di3 (rtx, rtx, rtx);
12329extern rtx gen_rotrv16qi3 (rtx, rtx, rtx);
12330extern rtx gen_rotrv8hi3 (rtx, rtx, rtx);
12331extern rtx gen_rotrv4si3 (rtx, rtx, rtx);
12332extern rtx gen_rotrv2di3 (rtx, rtx, rtx);
12333extern rtx gen_vrotrv16qi3 (rtx, rtx, rtx);
12334extern rtx gen_vrotrv8hi3 (rtx, rtx, rtx);
12335extern rtx gen_vrotrv4si3 (rtx, rtx, rtx);
12336extern rtx gen_vrotrv2di3 (rtx, rtx, rtx);
12337extern rtx gen_vrotlv16qi3 (rtx, rtx, rtx);
12338extern rtx gen_vrotlv8hi3 (rtx, rtx, rtx);
12339extern rtx gen_vrotlv4si3 (rtx, rtx, rtx);
12340extern rtx gen_vrotlv2di3 (rtx, rtx, rtx);
12341extern rtx gen_vlshrv16qi3 (rtx, rtx, rtx);
12342extern rtx gen_vlshrv8hi3 (rtx, rtx, rtx);
12343extern rtx gen_vlshrv4si3 (rtx, rtx, rtx);
12344extern rtx gen_vlshrv2di3 (rtx, rtx, rtx);
12345extern rtx gen_vlshrv16si3 (rtx, rtx, rtx);
12346extern rtx gen_vlshrv8di3 (rtx, rtx, rtx);
12347extern rtx gen_vlshrv8si3 (rtx, rtx, rtx);
12348extern rtx gen_vlshrv4di3 (rtx, rtx, rtx);
12349extern rtx gen_vashrv8hi3 (rtx, rtx, rtx);
12350extern rtx gen_vashrv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
12351extern rtx gen_vashrv16qi3 (rtx, rtx, rtx);
12352extern rtx gen_vashrv2di3 (rtx, rtx, rtx);
12353extern rtx gen_vashrv2di3_mask (rtx, rtx, rtx, rtx, rtx);
12354extern rtx gen_vashrv4si3 (rtx, rtx, rtx);
12355extern rtx gen_vashrv16si3 (rtx, rtx, rtx);
12356extern rtx gen_vashrv8si3 (rtx, rtx, rtx);
12357extern rtx gen_vashlv16qi3 (rtx, rtx, rtx);
12358extern rtx gen_vashlv8hi3 (rtx, rtx, rtx);
12359extern rtx gen_vashlv4si3 (rtx, rtx, rtx);
12360extern rtx gen_vashlv2di3 (rtx, rtx, rtx);
12361extern rtx gen_vashlv16si3 (rtx, rtx, rtx);
12362extern rtx gen_vashlv8di3 (rtx, rtx, rtx);
12363extern rtx gen_vashlv8si3 (rtx, rtx, rtx);
12364extern rtx gen_vashlv4di3 (rtx, rtx, rtx);
12365extern rtx gen_ashlv64qi3 (rtx, rtx, rtx);
12366extern rtx gen_lshrv64qi3 (rtx, rtx, rtx);
12367extern rtx gen_ashrv64qi3 (rtx, rtx, rtx);
12368extern rtx gen_ashlv32qi3 (rtx, rtx, rtx);
12369extern rtx gen_lshrv32qi3 (rtx, rtx, rtx);
12370extern rtx gen_ashrv32qi3 (rtx, rtx, rtx);
12371extern rtx gen_ashlv16qi3 (rtx, rtx, rtx);
12372extern rtx gen_lshrv16qi3 (rtx, rtx, rtx);
12373extern rtx gen_ashrv16qi3 (rtx, rtx, rtx);
12374extern rtx gen_ashrv2di3 (rtx, rtx, rtx);
12375extern rtx gen_xop_vmfrczv4sf2 (rtx, rtx);
12376extern rtx gen_xop_vmfrczv2df2 (rtx, rtx);
12377extern rtx gen_avx_vzeroall (void);
12378extern rtx gen_avx2_permv4di (rtx, rtx, rtx);
12379extern rtx gen_avx2_permv4df (rtx, rtx, rtx);
12380extern rtx gen_avx512vl_permv4di_mask (rtx, rtx, rtx, rtx, rtx);
12381extern rtx gen_avx512vl_permv4df_mask (rtx, rtx, rtx, rtx, rtx);
12382extern rtx gen_avx512f_permv8df (rtx, rtx, rtx);
12383extern rtx gen_avx512f_permv8di (rtx, rtx, rtx);
12384extern rtx gen_avx512f_permv8df_mask (rtx, rtx, rtx, rtx, rtx);
12385extern rtx gen_avx512f_permv8di_mask (rtx, rtx, rtx, rtx, rtx);
12386extern rtx gen_avx512f_vpermilv8df (rtx, rtx, rtx);
12387extern rtx gen_avx512f_vpermilv8df_mask (rtx, rtx, rtx, rtx, rtx);
12388extern rtx gen_avx_vpermilv4df (rtx, rtx, rtx);
12389extern rtx gen_avx_vpermilv4df_mask (rtx, rtx, rtx, rtx, rtx);
12390extern rtx gen_avx_vpermilv2df (rtx, rtx, rtx);
12391extern rtx gen_avx_vpermilv2df_mask (rtx, rtx, rtx, rtx, rtx);
12392extern rtx gen_avx512f_vpermilv16sf (rtx, rtx, rtx);
12393extern rtx gen_avx512f_vpermilv16sf_mask (rtx, rtx, rtx, rtx, rtx);
12394extern rtx gen_avx_vpermilv8sf (rtx, rtx, rtx);
12395extern rtx gen_avx_vpermilv8sf_mask (rtx, rtx, rtx, rtx, rtx);
12396extern rtx gen_avx_vpermilv4sf (rtx, rtx, rtx);
12397extern rtx gen_avx_vpermilv4sf_mask (rtx, rtx, rtx, rtx, rtx);
12398extern rtx gen_avx512f_vpermi2varv16si3_maskz (rtx, rtx, rtx, rtx, rtx);
12399extern rtx gen_avx512f_vpermi2varv16sf3_maskz (rtx, rtx, rtx, rtx, rtx);
12400extern rtx gen_avx512f_vpermi2varv8di3_maskz (rtx, rtx, rtx, rtx, rtx);
12401extern rtx gen_avx512f_vpermi2varv8df3_maskz (rtx, rtx, rtx, rtx, rtx);
12402extern rtx gen_avx512vl_vpermi2varv8si3_maskz (rtx, rtx, rtx, rtx, rtx);
12403extern rtx gen_avx512vl_vpermi2varv8sf3_maskz (rtx, rtx, rtx, rtx, rtx);
12404extern rtx gen_avx512vl_vpermi2varv4di3_maskz (rtx, rtx, rtx, rtx, rtx);
12405extern rtx gen_avx512vl_vpermi2varv4df3_maskz (rtx, rtx, rtx, rtx, rtx);
12406extern rtx gen_avx512vl_vpermi2varv4si3_maskz (rtx, rtx, rtx, rtx, rtx);
12407extern rtx gen_avx512vl_vpermi2varv4sf3_maskz (rtx, rtx, rtx, rtx, rtx);
12408extern rtx gen_avx512vl_vpermi2varv2di3_maskz (rtx, rtx, rtx, rtx, rtx);
12409extern rtx gen_avx512vl_vpermi2varv2df3_maskz (rtx, rtx, rtx, rtx, rtx);
12410extern rtx gen_avx512bw_vpermi2varv64qi3_maskz (rtx, rtx, rtx, rtx, rtx);
12411extern rtx gen_avx512vl_vpermi2varv16qi3_maskz (rtx, rtx, rtx, rtx, rtx);
12412extern rtx gen_avx512vl_vpermi2varv32qi3_maskz (rtx, rtx, rtx, rtx, rtx);
12413extern rtx gen_avx512vl_vpermi2varv8hi3_maskz (rtx, rtx, rtx, rtx, rtx);
12414extern rtx gen_avx512vl_vpermi2varv16hi3_maskz (rtx, rtx, rtx, rtx, rtx);
12415extern rtx gen_avx512bw_vpermi2varv32hi3_maskz (rtx, rtx, rtx, rtx, rtx);
12416extern rtx gen_avx512f_vpermt2varv16si3_maskz (rtx, rtx, rtx, rtx, rtx);
12417extern rtx gen_avx512f_vpermt2varv16sf3_maskz (rtx, rtx, rtx, rtx, rtx);
12418extern rtx gen_avx512f_vpermt2varv8di3_maskz (rtx, rtx, rtx, rtx, rtx);
12419extern rtx gen_avx512f_vpermt2varv8df3_maskz (rtx, rtx, rtx, rtx, rtx);
12420extern rtx gen_avx512vl_vpermt2varv8si3_maskz (rtx, rtx, rtx, rtx, rtx);
12421extern rtx gen_avx512vl_vpermt2varv8sf3_maskz (rtx, rtx, rtx, rtx, rtx);
12422extern rtx gen_avx512vl_vpermt2varv4di3_maskz (rtx, rtx, rtx, rtx, rtx);
12423extern rtx gen_avx512vl_vpermt2varv4df3_maskz (rtx, rtx, rtx, rtx, rtx);
12424extern rtx gen_avx512vl_vpermt2varv4si3_maskz (rtx, rtx, rtx, rtx, rtx);
12425extern rtx gen_avx512vl_vpermt2varv4sf3_maskz (rtx, rtx, rtx, rtx, rtx);
12426extern rtx gen_avx512vl_vpermt2varv2di3_maskz (rtx, rtx, rtx, rtx, rtx);
12427extern rtx gen_avx512vl_vpermt2varv2df3_maskz (rtx, rtx, rtx, rtx, rtx);
12428extern rtx gen_avx512bw_vpermt2varv64qi3_maskz (rtx, rtx, rtx, rtx, rtx);
12429extern rtx gen_avx512vl_vpermt2varv16qi3_maskz (rtx, rtx, rtx, rtx, rtx);
12430extern rtx gen_avx512vl_vpermt2varv32qi3_maskz (rtx, rtx, rtx, rtx, rtx);
12431extern rtx gen_avx512vl_vpermt2varv8hi3_maskz (rtx, rtx, rtx, rtx, rtx);
12432extern rtx gen_avx512vl_vpermt2varv16hi3_maskz (rtx, rtx, rtx, rtx, rtx);
12433extern rtx gen_avx512bw_vpermt2varv32hi3_maskz (rtx, rtx, rtx, rtx, rtx);
12434extern rtx gen_avx_vperm2f128v8si3 (rtx, rtx, rtx, rtx);
12435extern rtx gen_avx_vperm2f128v8sf3 (rtx, rtx, rtx, rtx);
12436extern rtx gen_avx_vperm2f128v4df3 (rtx, rtx, rtx, rtx);
12437extern rtx gen_avx512vl_vinsertv8si (rtx, rtx, rtx, rtx, rtx, rtx);
12438extern rtx gen_avx512vl_vinsertv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
12439extern rtx gen_avx512vl_vinsertv4di (rtx, rtx, rtx, rtx, rtx, rtx);
12440extern rtx gen_avx512vl_vinsertv4df (rtx, rtx, rtx, rtx, rtx, rtx);
12441extern rtx gen_avx_vinsertf128v32qi (rtx, rtx, rtx, rtx);
12442extern rtx gen_avx_vinsertf128v16hi (rtx, rtx, rtx, rtx);
12443extern rtx gen_avx_vinsertf128v8si (rtx, rtx, rtx, rtx);
12444extern rtx gen_avx_vinsertf128v4di (rtx, rtx, rtx, rtx);
12445extern rtx gen_avx_vinsertf128v8sf (rtx, rtx, rtx, rtx);
12446extern rtx gen_avx_vinsertf128v4df (rtx, rtx, rtx, rtx);
12447extern rtx gen_maskloadv4sfv4si (rtx, rtx, rtx);
12448extern rtx gen_maskloadv2dfv2di (rtx, rtx, rtx);
12449extern rtx gen_maskloadv8sfv8si (rtx, rtx, rtx);
12450extern rtx gen_maskloadv4dfv4di (rtx, rtx, rtx);
12451extern rtx gen_maskloadv4siv4si (rtx, rtx, rtx);
12452extern rtx gen_maskloadv2div2di (rtx, rtx, rtx);
12453extern rtx gen_maskloadv8siv8si (rtx, rtx, rtx);
12454extern rtx gen_maskloadv4div4di (rtx, rtx, rtx);
12455extern rtx gen_maskloadv16sihi (rtx, rtx, rtx);
12456extern rtx gen_maskloadv8siqi (rtx, rtx, rtx);
12457extern rtx gen_maskloadv4siqi (rtx, rtx, rtx);
12458extern rtx gen_maskloadv8diqi (rtx, rtx, rtx);
12459extern rtx gen_maskloadv4diqi (rtx, rtx, rtx);
12460extern rtx gen_maskloadv2diqi (rtx, rtx, rtx);
12461extern rtx gen_maskloadv16sfhi (rtx, rtx, rtx);
12462extern rtx gen_maskloadv8sfqi (rtx, rtx, rtx);
12463extern rtx gen_maskloadv4sfqi (rtx, rtx, rtx);
12464extern rtx gen_maskloadv8dfqi (rtx, rtx, rtx);
12465extern rtx gen_maskloadv4dfqi (rtx, rtx, rtx);
12466extern rtx gen_maskloadv2dfqi (rtx, rtx, rtx);
12467extern rtx gen_maskloadv64qidi (rtx, rtx, rtx);
12468extern rtx gen_maskloadv16qihi (rtx, rtx, rtx);
12469extern rtx gen_maskloadv32qisi (rtx, rtx, rtx);
12470extern rtx gen_maskloadv32hisi (rtx, rtx, rtx);
12471extern rtx gen_maskloadv16hihi (rtx, rtx, rtx);
12472extern rtx gen_maskloadv8hiqi (rtx, rtx, rtx);
12473extern rtx gen_maskstorev4sfv4si (rtx, rtx, rtx);
12474extern rtx gen_maskstorev2dfv2di (rtx, rtx, rtx);
12475extern rtx gen_maskstorev8sfv8si (rtx, rtx, rtx);
12476extern rtx gen_maskstorev4dfv4di (rtx, rtx, rtx);
12477extern rtx gen_maskstorev4siv4si (rtx, rtx, rtx);
12478extern rtx gen_maskstorev2div2di (rtx, rtx, rtx);
12479extern rtx gen_maskstorev8siv8si (rtx, rtx, rtx);
12480extern rtx gen_maskstorev4div4di (rtx, rtx, rtx);
12481extern rtx gen_maskstorev16sihi (rtx, rtx, rtx);
12482extern rtx gen_maskstorev8siqi (rtx, rtx, rtx);
12483extern rtx gen_maskstorev4siqi (rtx, rtx, rtx);
12484extern rtx gen_maskstorev8diqi (rtx, rtx, rtx);
12485extern rtx gen_maskstorev4diqi (rtx, rtx, rtx);
12486extern rtx gen_maskstorev2diqi (rtx, rtx, rtx);
12487extern rtx gen_maskstorev16sfhi (rtx, rtx, rtx);
12488extern rtx gen_maskstorev8sfqi (rtx, rtx, rtx);
12489extern rtx gen_maskstorev4sfqi (rtx, rtx, rtx);
12490extern rtx gen_maskstorev8dfqi (rtx, rtx, rtx);
12491extern rtx gen_maskstorev4dfqi (rtx, rtx, rtx);
12492extern rtx gen_maskstorev2dfqi (rtx, rtx, rtx);
12493extern rtx gen_maskstorev64qidi (rtx, rtx, rtx);
12494extern rtx gen_maskstorev16qihi (rtx, rtx, rtx);
12495extern rtx gen_maskstorev32qisi (rtx, rtx, rtx);
12496extern rtx gen_maskstorev32hisi (rtx, rtx, rtx);
12497extern rtx gen_maskstorev16hihi (rtx, rtx, rtx);
12498extern rtx gen_maskstorev8hiqi (rtx, rtx, rtx);
12499extern rtx gen_cbranchv4si4 (rtx, rtx, rtx, rtx);
12500extern rtx gen_cbranchv2di4 (rtx, rtx, rtx, rtx);
12501extern rtx gen_cbranchv8si4 (rtx, rtx, rtx, rtx);
12502extern rtx gen_cbranchv4di4 (rtx, rtx, rtx, rtx);
12503extern rtx gen_vec_initv32qi (rtx, rtx);
12504extern rtx gen_vec_initv16hi (rtx, rtx);
12505extern rtx gen_vec_initv8si (rtx, rtx);
12506extern rtx gen_vec_initv4di (rtx, rtx);
12507extern rtx gen_vec_initv8sf (rtx, rtx);
12508extern rtx gen_vec_initv4df (rtx, rtx);
12509extern rtx gen_vec_initv16si (rtx, rtx);
12510extern rtx gen_vec_initv16sf (rtx, rtx);
12511extern rtx gen_vec_initv8di (rtx, rtx);
12512extern rtx gen_vec_initv8df (rtx, rtx);
12513extern rtx gen_vec_initv32hi (rtx, rtx);
12514extern rtx gen_vec_initv64qi (rtx, rtx);
12515extern rtx gen_vcvtps2ph_mask (rtx, rtx, rtx, rtx, rtx);
12516extern rtx gen_vcvtps2ph (rtx, rtx, rtx);
12517extern rtx gen_avx2_gathersiv2di (rtx, rtx, rtx, rtx, rtx, rtx);
12518extern rtx gen_avx2_gathersiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
12519extern rtx gen_avx2_gathersiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
12520extern rtx gen_avx2_gathersiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
12521extern rtx gen_avx2_gathersiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
12522extern rtx gen_avx2_gathersiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
12523extern rtx gen_avx2_gathersiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
12524extern rtx gen_avx2_gathersiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
12525extern rtx gen_avx2_gatherdiv2di (rtx, rtx, rtx, rtx, rtx, rtx);
12526extern rtx gen_avx2_gatherdiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
12527extern rtx gen_avx2_gatherdiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
12528extern rtx gen_avx2_gatherdiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
12529extern rtx gen_avx2_gatherdiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
12530extern rtx gen_avx2_gatherdiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
12531extern rtx gen_avx2_gatherdiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
12532extern rtx gen_avx2_gatherdiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
12533extern rtx gen_avx512f_gathersiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
12534extern rtx gen_avx512f_gathersiv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
12535extern rtx gen_avx512f_gathersiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
12536extern rtx gen_avx512f_gathersiv8df (rtx, rtx, rtx, rtx, rtx, rtx);
12537extern rtx gen_avx512vl_gathersiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
12538extern rtx gen_avx512vl_gathersiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
12539extern rtx gen_avx512vl_gathersiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
12540extern rtx gen_avx512vl_gathersiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
12541extern rtx gen_avx512vl_gathersiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
12542extern rtx gen_avx512vl_gathersiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
12543extern rtx gen_avx512vl_gathersiv2di (rtx, rtx, rtx, rtx, rtx, rtx);
12544extern rtx gen_avx512vl_gathersiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
12545extern rtx gen_avx512f_gatherdiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
12546extern rtx gen_avx512f_gatherdiv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
12547extern rtx gen_avx512f_gatherdiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
12548extern rtx gen_avx512f_gatherdiv8df (rtx, rtx, rtx, rtx, rtx, rtx);
12549extern rtx gen_avx512vl_gatherdiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
12550extern rtx gen_avx512vl_gatherdiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
12551extern rtx gen_avx512vl_gatherdiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
12552extern rtx gen_avx512vl_gatherdiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
12553extern rtx gen_avx512vl_gatherdiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
12554extern rtx gen_avx512vl_gatherdiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
12555extern rtx gen_avx512vl_gatherdiv2di (rtx, rtx, rtx, rtx, rtx, rtx);
12556extern rtx gen_avx512vl_gatherdiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
12557extern rtx gen_avx512f_scattersiv16si (rtx, rtx, rtx, rtx, rtx);
12558extern rtx gen_avx512f_scattersiv16sf (rtx, rtx, rtx, rtx, rtx);
12559extern rtx gen_avx512f_scattersiv8di (rtx, rtx, rtx, rtx, rtx);
12560extern rtx gen_avx512f_scattersiv8df (rtx, rtx, rtx, rtx, rtx);
12561extern rtx gen_avx512vl_scattersiv8si (rtx, rtx, rtx, rtx, rtx);
12562extern rtx gen_avx512vl_scattersiv8sf (rtx, rtx, rtx, rtx, rtx);
12563extern rtx gen_avx512vl_scattersiv4di (rtx, rtx, rtx, rtx, rtx);
12564extern rtx gen_avx512vl_scattersiv4df (rtx, rtx, rtx, rtx, rtx);
12565extern rtx gen_avx512vl_scattersiv4si (rtx, rtx, rtx, rtx, rtx);
12566extern rtx gen_avx512vl_scattersiv4sf (rtx, rtx, rtx, rtx, rtx);
12567extern rtx gen_avx512vl_scattersiv2di (rtx, rtx, rtx, rtx, rtx);
12568extern rtx gen_avx512vl_scattersiv2df (rtx, rtx, rtx, rtx, rtx);
12569extern rtx gen_avx512f_scatterdiv16si (rtx, rtx, rtx, rtx, rtx);
12570extern rtx gen_avx512f_scatterdiv16sf (rtx, rtx, rtx, rtx, rtx);
12571extern rtx gen_avx512f_scatterdiv8di (rtx, rtx, rtx, rtx, rtx);
12572extern rtx gen_avx512f_scatterdiv8df (rtx, rtx, rtx, rtx, rtx);
12573extern rtx gen_avx512vl_scatterdiv8si (rtx, rtx, rtx, rtx, rtx);
12574extern rtx gen_avx512vl_scatterdiv8sf (rtx, rtx, rtx, rtx, rtx);
12575extern rtx gen_avx512vl_scatterdiv4di (rtx, rtx, rtx, rtx, rtx);
12576extern rtx gen_avx512vl_scatterdiv4df (rtx, rtx, rtx, rtx, rtx);
12577extern rtx gen_avx512vl_scatterdiv4si (rtx, rtx, rtx, rtx, rtx);
12578extern rtx gen_avx512vl_scatterdiv4sf (rtx, rtx, rtx, rtx, rtx);
12579extern rtx gen_avx512vl_scatterdiv2di (rtx, rtx, rtx, rtx, rtx);
12580extern rtx gen_avx512vl_scatterdiv2df (rtx, rtx, rtx, rtx, rtx);
12581extern rtx gen_avx512f_expandv16si_maskz (rtx, rtx, rtx, rtx);
12582extern rtx gen_avx512f_expandv16sf_maskz (rtx, rtx, rtx, rtx);
12583extern rtx gen_avx512f_expandv8di_maskz (rtx, rtx, rtx, rtx);
12584extern rtx gen_avx512f_expandv8df_maskz (rtx, rtx, rtx, rtx);
12585extern rtx gen_avx512vl_expandv8si_maskz (rtx, rtx, rtx, rtx);
12586extern rtx gen_avx512vl_expandv8sf_maskz (rtx, rtx, rtx, rtx);
12587extern rtx gen_avx512vl_expandv4di_maskz (rtx, rtx, rtx, rtx);
12588extern rtx gen_avx512vl_expandv4df_maskz (rtx, rtx, rtx, rtx);
12589extern rtx gen_avx512vl_expandv4si_maskz (rtx, rtx, rtx, rtx);
12590extern rtx gen_avx512vl_expandv4sf_maskz (rtx, rtx, rtx, rtx);
12591extern rtx gen_avx512vl_expandv2di_maskz (rtx, rtx, rtx, rtx);
12592extern rtx gen_avx512vl_expandv2df_maskz (rtx, rtx, rtx, rtx);
12593extern rtx gen_vpamdd52huqv8di_maskz (rtx, rtx, rtx, rtx, rtx);
12594extern rtx gen_vpamdd52huqv4di_maskz (rtx, rtx, rtx, rtx, rtx);
12595extern rtx gen_vpamdd52huqv2di_maskz (rtx, rtx, rtx, rtx, rtx);
12596extern rtx gen_vpamdd52luqv8di_maskz (rtx, rtx, rtx, rtx, rtx);
12597extern rtx gen_vpamdd52luqv4di_maskz (rtx, rtx, rtx, rtx, rtx);
12598extern rtx gen_vpamdd52luqv2di_maskz (rtx, rtx, rtx, rtx, rtx);
12599extern rtx gen_movv64sf (rtx, rtx);
12600extern rtx gen_movv64si (rtx, rtx);
12601extern rtx gen_sse2_lfence (void);
12602extern rtx gen_sse_sfence (void);
12603extern rtx gen_sse2_mfence (void);
12604extern rtx gen_mem_thread_fence (rtx);
12605extern rtx gen_atomic_loadqi (rtx, rtx, rtx);
12606extern rtx gen_atomic_loadhi (rtx, rtx, rtx);
12607extern rtx gen_atomic_loadsi (rtx, rtx, rtx);
12608extern rtx gen_atomic_loaddi (rtx, rtx, rtx);
12609extern rtx gen_atomic_storeqi (rtx, rtx, rtx);
12610extern rtx gen_atomic_storehi (rtx, rtx, rtx);
12611extern rtx gen_atomic_storesi (rtx, rtx, rtx);
12612extern rtx gen_atomic_storedi (rtx, rtx, rtx);
12613extern rtx gen_atomic_compare_and_swapqi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
12614extern rtx gen_atomic_compare_and_swaphi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
12615extern rtx gen_atomic_compare_and_swapsi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
12616extern rtx gen_atomic_compare_and_swapdi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
12617extern rtx gen_atomic_compare_and_swapti (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
12618extern rtx gen_atomic_bit_test_and_sethi (rtx, rtx, rtx, rtx, rtx);
12619extern rtx gen_atomic_bit_test_and_setsi (rtx, rtx, rtx, rtx, rtx);
12620extern rtx gen_atomic_bit_test_and_setdi (rtx, rtx, rtx, rtx, rtx);
12621extern rtx gen_atomic_bit_test_and_complementhi (rtx, rtx, rtx, rtx, rtx);
12622extern rtx gen_atomic_bit_test_and_complementsi (rtx, rtx, rtx, rtx, rtx);
12623extern rtx gen_atomic_bit_test_and_complementdi (rtx, rtx, rtx, rtx, rtx);
12624extern rtx gen_atomic_bit_test_and_resethi (rtx, rtx, rtx, rtx, rtx);
12625extern rtx gen_atomic_bit_test_and_resetsi (rtx, rtx, rtx, rtx, rtx);
12626extern rtx gen_atomic_bit_test_and_resetdi (rtx, rtx, rtx, rtx, rtx);
12627
12628#endif /* GCC_INSN_FLAGS_H */
12629